diff --git a/.dockerignore b/.dockerignore index cbfb4765f0e6d..5ff397890187a 100644 --- a/.dockerignore +++ b/.dockerignore @@ -39,3 +39,11 @@ !test-runner-jest.config.js !test-runner-jest-environment.js !patches +!rust +rust/.env +rust/.github +rust/docker +rust/target +rust/cyclotron-node/dist +rust/cyclotron-node/node_modules +rust/cyclotron-node/index.node diff --git a/.github/actions/run-backend-tests/action.yml b/.github/actions/run-backend-tests/action.yml index 79db7987e7782..7bbe4b5147942 100644 --- a/.github/actions/run-backend-tests/action.yml +++ b/.github/actions/run-backend-tests/action.yml @@ -202,7 +202,7 @@ runs: - name: Upload updated timing data as artifacts uses: actions/upload-artifact@v4 - if: ${{ inputs.person-on-events != 'true' && inputs.clickhouse-server-image == 'clickhouse/clickhouse-server:23.12.5.81-alpine' }} + if: ${{ inputs.person-on-events != 'true' && inputs.clickhouse-server-image == 'clickhouse/clickhouse-server:23.12.6.19-alpine' }} with: name: timing_data-${{ inputs.segment }}-${{ inputs.group }} path: .test_durations diff --git a/.github/workflows/ci-backend-update-test-timing.yml b/.github/workflows/ci-backend-update-test-timing.yml index 01ad7d33ce305..2d722584cc95e 100644 --- a/.github/workflows/ci-backend-update-test-timing.yml +++ b/.github/workflows/ci-backend-update-test-timing.yml @@ -29,13 +29,13 @@ jobs: group: 1 token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} python-version: '3.11.9' - clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.5.81-alpine' + clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.6.19-alpine' segment: 'FOSS' person-on-events: false - name: Upload updated timing data as artifacts uses: actions/upload-artifact@v4 - if: ${{ inputs.person-on-events != 'true' && inputs.clickhouse-server-image == 'clickhouse/clickhouse-server:23.12.5.81-alpine' }} + if: ${{ inputs.person-on-events != 'true' && inputs.clickhouse-server-image == 'clickhouse/clickhouse-server:23.12.6.19-alpine' }} with: name: timing_data-${{ inputs.segment }}-${{ inputs.group }} path: .test_durations diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml index 9f7afb3e2ccb5..6155740676e03 100644 --- a/.github/workflows/ci-backend.yml +++ b/.github/workflows/ci-backend.yml @@ -233,7 +233,7 @@ jobs: fail-fast: false matrix: python-version: ['3.11.9'] - clickhouse-server-image: ['clickhouse/clickhouse-server:23.12.5.81-alpine'] + clickhouse-server-image: ['clickhouse/clickhouse-server:23.12.6.19-alpine'] segment: ['Core'] person-on-events: [false, true] # :NOTE: Keep concurrency and groups in sync @@ -242,7 +242,7 @@ jobs: include: - segment: 'Temporal' person-on-events: false - clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.5.81-alpine' + clickhouse-server-image: 'clickhouse/clickhouse-server:23.12.6.19-alpine' python-version: '3.11.9' concurrency: 1 group: 1 @@ -313,7 +313,7 @@ jobs: strategy: fail-fast: false matrix: - clickhouse-server-image: ['clickhouse/clickhouse-server:23.12.5.81-alpine'] + clickhouse-server-image: ['clickhouse/clickhouse-server:23.12.6.19-alpine'] if: needs.changes.outputs.backend == 'true' runs-on: ubuntu-latest steps: diff --git a/.github/workflows/ci-hog.yml b/.github/workflows/ci-hog.yml index 6f4c326506da6..5733393905e49 100644 --- a/.github/workflows/ci-hog.yml +++ b/.github/workflows/ci-hog.yml @@ -118,9 +118,9 @@ jobs: antlr | grep "Version" npm run grammar:build && git diff --exit-code env: - # Installing a version of ANTLR compatible with what's in Homebrew as of 
October 2023 (version 4.13), + # Installing a version of ANTLR compatible with what's in Homebrew as of August 2024 (version 4.13.2), # as apt-get is quite out of date. The same version must be set in hogql_parser/pyproject.toml - ANTLR_VERSION: '4.13.1' + ANTLR_VERSION: '4.13.2' - name: Run HogVM Python tests if: needs.changes.outputs.hog == 'true' diff --git a/.github/workflows/container-images-cd.yml b/.github/workflows/container-images-cd.yml index 18888e9e3b301..06741577bc8ba 100644 --- a/.github/workflows/container-images-cd.yml +++ b/.github/workflows/container-images-cd.yml @@ -99,7 +99,8 @@ jobs: "release": "posthog", "commit": ${{ toJson(github.event.head_commit) }}, "repository": ${{ toJson(github.repository) }}, - "labels": ${{ steps.labels.outputs.labels }} + "labels": ${{ steps.labels.outputs.labels }}, + "timestamp": "${{ github.event.head_commit.timestamp }}" } - name: Check for changes in plugins directory @@ -124,7 +125,8 @@ jobs: "release": "ingestion", "commit": ${{ toJson(github.event.head_commit) }}, "repository": ${{ toJson(github.repository) }}, - "labels": ${{ toJson(steps.labels.outputs.labels) }} + "labels": ${{ toJson(steps.labels.outputs.labels) }}, + "timestamp": "${{ github.event.head_commit.timestamp }}" } - name: Check for changes that affect batch exports temporal worker @@ -149,7 +151,8 @@ jobs: "release": "temporal-worker", "commit": ${{ toJson(github.event.head_commit) }}, "repository": ${{ toJson(github.repository) }}, - "labels": ${{ steps.labels.outputs.labels }} + "labels": ${{ steps.labels.outputs.labels }}, + "timestamp": "${{ github.event.head_commit.timestamp }}" } - name: Check for changes that affect general purpose temporal worker @@ -174,7 +177,8 @@ jobs: "release": "temporal-worker-general-purpose", "commit": ${{ toJson(github.event.head_commit) }}, "repository": ${{ toJson(github.repository) }}, - "labels": ${{ steps.labels.outputs.labels }} + "labels": ${{ steps.labels.outputs.labels }}, + "timestamp": "${{ github.event.head_commit.timestamp }}" } - name: Check for changes that affect data warehouse temporal worker @@ -199,5 +203,6 @@ jobs: "release": "temporal-worker-data-warehouse", "commit": ${{ toJson(github.event.head_commit) }}, "repository": ${{ toJson(github.repository) }}, - "labels": ${{ steps.labels.outputs.labels }} + "labels": ${{ steps.labels.outputs.labels }}, + "timestamp": "${{ github.event.head_commit.timestamp }}" } diff --git a/.github/workflows/livestream-docker-image.yml b/.github/workflows/livestream-docker-image.yml index 231a76ddaf5f7..7023ee98c03f1 100644 --- a/.github/workflows/livestream-docker-image.yml +++ b/.github/workflows/livestream-docker-image.yml @@ -84,5 +84,6 @@ jobs: }, "release": "livestream", "commit": ${{ toJson(github.event.head_commit) }}, - "repository": ${{ toJson(github.repository) }} + "repository": ${{ toJson(github.repository) }}, + "timestamp": "${{ github.event.head_commit.timestamp }}" } diff --git a/.github/workflows/rust-docker-build.yml b/.github/workflows/rust-docker-build.yml index 95c7f3e061de2..2343df89289ee 100644 --- a/.github/workflows/rust-docker-build.yml +++ b/.github/workflows/rust-docker-build.yml @@ -1,4 +1,4 @@ -name: Build rust container images +name: Build and deploy rust container images on: workflow_dispatch: @@ -19,12 +19,18 @@ jobs: - hook-api - hook-janitor - hook-worker + - cyclotron-janitor + - cyclotron-fetch + - property-defs-rs runs-on: depot-ubuntu-22.04-4 permissions: id-token: write # allow issuing OIDC tokens for this workflow run contents: read # allow 
reading the repo contents packages: write # allow push to ghcr.io + outputs: + digest: ${{ steps.docker_build.outputs.digest }} + defaults: run: working-directory: rust @@ -88,3 +94,36 @@ - name: Container image digest run: echo ${{ steps.docker_build.outputs.digest }} + + deploy: + name: Deploy capture-replay + runs-on: ubuntu-latest + needs: build + if: github.ref == 'refs/heads/master' + steps: + - name: get deployer token + id: deployer + uses: getsentry/action-github-app-token@v3 + with: + app_id: ${{ secrets.DEPLOYER_APP_ID }} + private_key: ${{ secrets.DEPLOYER_APP_PRIVATE_KEY }} + + - name: Trigger capture-replay deployment + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ steps.deployer.outputs.token }} + repository: PostHog/charts + event-type: commit_state_update + client-payload: | + { + "values": { + "image": { + "sha": "${{ needs.build.outputs.digest }}" + } + }, + "release": "capture-replay", + "commit": ${{ toJson(github.event.head_commit) }}, + "repository": ${{ toJson(github.repository) }}, + "labels": [], + "timestamp": "${{ github.event.head_commit.timestamp }}" + } diff --git a/.github/workflows/rust-hook-migrator-docker.yml b/.github/workflows/rust-hook-migrator-docker.yml index 518374b6e241c..bc9c52a8ed339 100644 --- a/.github/workflows/rust-hook-migrator-docker.yml +++ b/.github/workflows/rust-hook-migrator-docker.yml @@ -69,7 +69,7 @@ jobs: uses: depot/build-push-action@v1 with: context: ./rust/ - file: ./rust/Dockerfile.migrate + file: ./rust/Dockerfile.migrate-hooks push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 43afd01f92532..f4d14e9ed49ce 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -34,6 +34,7 @@ jobs: - '.github/workflows/rust.yml' - '.github/workflows/rust-docker-build.yml' - '.github/workflows/rust-hook-migrator-docker.yml' + - '.github/workflows/rust-cyclotron-migrator-docker.yml' - 'posthog/management/commands/setup_test_environment.py' - 'posthog/migrations/**' - 'ee/migrations/**' @@ -164,11 +165,17 @@ jobs: DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog' run: cd ../ && python manage.py setup_test_environment --only-postgres + - name: Download MaxMind Database + if: needs.changes.outputs.rust == 'true' + run: | + mkdir -p ../share + curl -L "https://mmdbcdn.posthog.net/" --http1.1 | brotli --decompress --output=../share/GeoLite2-City.mmdb + - name: Run cargo test if: needs.changes.outputs.rust == 'true' run: | echo "Starting cargo test" - cargo test --all-features ${{ matrix.package == 'feature-flags' && '--package feature-flags' || '--workspace --exclude feature-flags' }} + RUST_BACKTRACE=1 cargo test --all-features ${{ matrix.package == 'feature-flags' && '--package feature-flags' || '--workspace --exclude feature-flags' }} echo "Cargo test completed" linting: diff --git a/.github/workflows/vector-docker-build-deploy.yml b/.github/workflows/vector-docker-build-deploy.yml index be27aab697897..a1d97f846f298 100644 --- a/.github/workflows/vector-docker-build-deploy.yml +++ b/.github/workflows/vector-docker-build-deploy.yml @@ -101,5 +101,6 @@ jobs: "release": "replay-capture-vector", "commit": ${{ toJson(github.event.head_commit) }}, "repository": ${{ toJson(github.repository) }}, - "labels": [] + "labels": [], + "timestamp": "${{ github.event.head_commit.timestamp }}" } diff --git a/.gitignore b/.gitignore index bbd2fef53bc62..7470edb7eeb58 100644 --- a/.gitignore +++
b/.gitignore @@ -64,3 +64,5 @@ plugin-transpiler/dist *-esbuild-bundle-visualization.html .dlt *.db +# Ignore any log files that happen to be present +*.log \ No newline at end of file diff --git a/bin/start-cyclotron b/bin/start-cyclotron new file mode 100755 index 0000000000000..074ec4802d0a4 --- /dev/null +++ b/bin/start-cyclotron @@ -0,0 +1,21 @@ +#!/bin/bash + +set -ex + +trap "trap - SIGTERM && kill -- -$$" SIGINT SIGTERM EXIT + +cd rust + +cargo build + +export RUST_LOG=${DEBUG:-debug} +SQLX_QUERY_LEVEL=${SQLX_QUERY_LEVEL:-warn} +export RUST_LOG=$RUST_LOG,sqlx::query=$SQLX_QUERY_LEVEL + +export DATABASE_URL=${DATABASE_URL:-postgres://posthog:posthog@localhost:5432/posthog} +export ALLOW_INTERNAL_IPS=${ALLOW_INTERNAL_IPS:-true} + +./target/debug/cyclotron-fetch & +./target/debug/cyclotron-janitor & + +wait diff --git a/cypress/e2e/alerts.cy.ts b/cypress/e2e/alerts.cy.ts index ea2dd44380ab1..110647969e6a7 100644 --- a/cypress/e2e/alerts.cy.ts +++ b/cypress/e2e/alerts.cy.ts @@ -15,19 +15,19 @@ describe('Alerts', () => { const createAlert = ( name: string = 'Alert name', - email: string = 'a@b.c', lowerThreshold: string = '100', upperThreshold: string = '200' ): void => { cy.get('[data-attr=more-button]').click() - cy.contains('Alerts').click() + cy.contains('Manage alerts').click() cy.contains('New alert').click() cy.get('[data-attr=alert-name]').clear().type(name) - cy.get('[data-attr=alert-notification-targets').clear().type(email) + cy.get('[data-attr=subscribed-users').click().type('{downarrow}{enter}') cy.get('[data-attr=alert-lower-threshold').clear().type(lowerThreshold) cy.get('[data-attr=alert-upper-threshold').clear().type(upperThreshold) cy.contains('Create alert').click() + cy.get('.Toastify__toast-body').should('contain', 'Alert saved') cy.url().should('not.include', '/new') cy.get('[aria-label="close"]').click() @@ -38,6 +38,7 @@ describe('Alerts', () => { cy.get('[data-attr=insight-edit-button]').click() cy.get('[data-attr=chart-filter]').click() cy.contains(displayType).click() + cy.get('.insight-empty-state').should('not.exist') cy.get('[data-attr=insight-save-button]').contains('Save').click() cy.url().should('not.include', '/edit') } @@ -45,7 +46,7 @@ describe('Alerts', () => { it('Should allow create and delete an alert', () => { cy.get('[data-attr=more-button]').click() // Alerts should be disabled for trends represented with graphs - cy.get('[data-attr=disabled-alerts-button]').should('exist') + cy.get('[data-attr=manage-alerts-button]').should('have.attr', 'aria-disabled', 'true') setInsightDisplayTypeAndSave('Number') @@ -54,10 +55,8 @@ describe('Alerts', () => { // Check the alert has the same values as when it was created cy.get('[data-attr=more-button]').click() - cy.contains('Alerts').click() cy.contains('Manage alerts').click() cy.get('[data-attr=alert-list-item]').contains('Alert name').click() - cy.get('[data-attr=alert-notification-targets]').should('have.value', 'a@b.c') cy.get('[data-attr=alert-name]').should('have.value', 'Alert name') cy.get('[data-attr=alert-lower-threshold').should('have.value', '100') cy.get('[data-attr=alert-upper-threshold').should('have.value', '200') @@ -90,7 +89,6 @@ describe('Alerts', () => { // Assert that saving an insight in an incompatible state removes alerts cy.get('[data-attr=more-button]').click() - cy.contains('Alerts').click() cy.contains('Manage alerts').click() cy.contains('Alert to be deleted because of a changed insight').should('not.exist') }) diff --git a/cypress/e2e/billing-limits.cy.ts 
b/cypress/e2e/billing-limits.cy.ts index 179549caa4900..4f412004a0cc5 100644 --- a/cypress/e2e/billing-limits.cy.ts +++ b/cypress/e2e/billing-limits.cy.ts @@ -61,6 +61,38 @@ describe('Billing Limits', () => { ) }) + it('Show existing limit and allow user to change set to $0', () => { + cy.intercept('GET', '/api/billing/', (req) => { + req.reply({ + statusCode: 200, + body: { + ...require('../fixtures/api/billing/billing.json'), + custom_limits_usd: { product_analytics: 100 }, + }, + }) + }).as('getBilling') + cy.visit('/organization/billing') + cy.wait('@getBilling') + + cy.intercept('PATCH', '/api/billing/', (req) => { + req.reply({ + statusCode: 200, + body: { + ...require('../fixtures/api/billing/billing.json'), + custom_limits_usd: { product_analytics: 0 }, + }, + }) + }).as('patchBilling') + + cy.get('[data-attr="billing-limit-input-wrapper-product_analytics"]').scrollIntoView() + cy.get('[data-attr="billing-limit-set-product_analytics"]').should('be.visible') + cy.contains('Edit limit').click() + cy.get('[data-attr="billing-limit-input-product_analytics"]').clear().type('0') + cy.get('[data-attr="save-billing-limit-product_analytics"]').click() + cy.wait('@patchBilling') + cy.get('[data-attr="billing-limit-set-product_analytics"]').should('contain', 'You have a $0 billing limit set') + }) + it('Show existing limit and allow user to remove it', () => { cy.intercept('GET', '/api/billing/', (req) => { req.reply({ diff --git a/cypress/e2e/dashboard.cy.ts b/cypress/e2e/dashboard.cy.ts index 3af5333100eef..7919c3dded56a 100644 --- a/cypress/e2e/dashboard.cy.ts +++ b/cypress/e2e/dashboard.cy.ts @@ -95,7 +95,7 @@ describe('Dashboard', () => { // refresh the dashboard by changing date range cy.get('[data-attr="date-filter"]').click() cy.contains('span', 'Last 14 days').click() - cy.contains('span', 'Apply and save dashboard').click() + cy.contains('span', 'Save').click() cy.contains('span[class="text-primary text-sm font-medium"]', 'Refreshing').should('not.exist') cy.get('span').contains('Refreshing').should('not.exist') @@ -163,7 +163,7 @@ describe('Dashboard', () => { cy.get('[data-attr=date-filter]').contains('No date range override').click() cy.get('div').contains('Yesterday').should('exist').click() cy.get('[data-attr=date-filter]').contains('Yesterday') - cy.get('button').contains('Apply and save dashboard').click() + cy.get('button').contains('Save').click() cy.get('.InsightCard h5').should('have.length', 1).contains('Yesterday') // Cool, now back to A and make sure the insight is still using the original range there, not the one from B cy.clickNavMenu('dashboards') diff --git a/cypress/e2e/trends.cy.ts b/cypress/e2e/trends.cy.ts index b9b1fbba24680..ce8a6e8574b30 100644 --- a/cypress/e2e/trends.cy.ts +++ b/cypress/e2e/trends.cy.ts @@ -170,4 +170,8 @@ describe('Trends', () => { cy.get('[data-attr=math-monthly_active-0]').trigger('mouseenter') // Activate warning tooltip cy.get('.Tooltip').contains('we recommend using "Unique users" here instead').should('exist') }) + + it('Does not show delete button on single series insight', () => { + cy.get('[data-attr=delete-prop-filter-0]').should('not.exist') + }) }) diff --git a/cypress/productAnalytics/index.ts b/cypress/productAnalytics/index.ts index cedfa970c825f..af1a0c3f32e85 100644 --- a/cypress/productAnalytics/index.ts +++ b/cypress/productAnalytics/index.ts @@ -176,7 +176,7 @@ export const dashboard = { cy.get('[data-attr="prop-filter-event_properties-0"]').click({ force: true }).wait(1000) cy.get('.LemonInput').type(value) 
cy.contains('.LemonButton__content', value).click({ force: true }) - cy.get('button').contains('Apply and save dashboard').click() + cy.get('button').contains('Save').click() }, addAnyFilter(): void { cy.get('.PropertyFilterButton').should('have.length', 0) @@ -188,7 +188,7 @@ export const dashboard = { // click .dashboard to blur cy.get('.dashboard').click({ force: true }) cy.get('.PropertyFilterButton').should('have.length', 1) - cy.get('button').contains('Apply and save dashboard').click() + cy.get('button').contains('Save').click() }, } diff --git a/docker-compose.base.yml b/docker-compose.base.yml index 54ed4a505ffa1..3900cbc2d4815 100644 --- a/docker-compose.base.yml +++ b/docker-compose.base.yml @@ -34,7 +34,7 @@ services: } handle @replay-capture { - reverse_proxy replay-capture:8000 + reverse_proxy replay-capture:3000 } handle { @@ -79,7 +79,7 @@ services: # Note: please keep the default version in sync across # `posthog` and the `charts-clickhouse` repos # - image: ${CLICKHOUSE_SERVER_IMAGE:-clickhouse/clickhouse-server:23.12.5.81-alpine} + image: ${CLICKHOUSE_SERVER_IMAGE:-clickhouse/clickhouse-server:23.12.6.19-alpine} restart: on-failure zookeeper: @@ -155,32 +155,31 @@ services: capture: image: ghcr.io/posthog/posthog/capture:master + build: + context: rust/ + args: + BIN: capture restart: on-failure environment: ADDRESS: '0.0.0.0:3000' KAFKA_TOPIC: 'events_plugin_ingestion' KAFKA_HOSTS: 'kafka:9092' REDIS_URL: 'redis://redis:6379/' + CAPTURE_MODE: events replay-capture: - image: ghcr.io/posthog/posthog/replay-capture:master + image: ghcr.io/posthog/posthog/capture:master build: - context: vector/replay-capture + context: rust/ + args: + BIN: capture restart: on-failure - entrypoint: ['sh', '-c'] - command: - - | - set -x - # seed empty required data files - mkdir -p /etc/vector/data - echo "token" > /etc/vector/data/quota_limited_teams.csv - echo "session_id" > /etc/vector/data/overflow_sessions.csv - exec vector -v --watch-config environment: - KAFKA_EVENTS_TOPIC: session_recording_snapshot_item_events - KAFKA_OVERFLOW_TOPIC: session_recording_snapshot_item_overflow - KAFKA_BOOSTRAP_SERVERS: 'kafka:9092' + ADDRESS: '0.0.0.0:3000' + KAFKA_TOPIC: 'session_recording_snapshot_item_events' + KAFKA_HOSTS: 'kafka:9092' REDIS_URL: 'redis://redis:6379/' + CAPTURE_MODE: recordings plugins: command: ./bin/plugin-server --no-restart-loop diff --git a/docker/clickhouse/test_function.xml b/docker/clickhouse/test_function.xml index 2a390a521b6c7..725d9ba0210b7 100644 --- a/docker/clickhouse/test_function.xml +++ b/docker/clickhouse/test_function.xml @@ -3,6 +3,7 @@ executable aggregate_funnel Array(Tuple(Int8, Nullable(String), Array(Float64))) + result UInt8 num_steps @@ -27,7 +28,7 @@ Array(Tuple(Nullable(Float64), Nullable(String), Array(Int8))) value - TabSeparated + JSONEachRow aggregate_funnel.py @@ -35,6 +36,7 @@ executable aggregate_funnel_cohort Array(Tuple(Int8, UInt64, Array(Float64))) + result UInt8 num_steps @@ -59,14 +61,15 @@ Array(Tuple(Nullable(Float64), UInt64, Array(Int8))) value - TabSeparated - aggregate_funnel_cohort.py + JSONEachRow + aggregate_funnel.py executable aggregate_funnel_array Array(Tuple(Int8, Array(String), Array(Float64))) + result UInt8 num_steps @@ -91,14 +94,15 @@ Array(Tuple(Nullable(Float64), Array(String), Array(Int8))) value - TabSeparated - aggregate_funnel_array.py + JSONEachRow + aggregate_funnel.py executable aggregate_funnel_test String + result UInt8 num_steps @@ -123,7 +127,161 @@ Array(Tuple(Nullable(Float64), Nullable(String), 
Array(Int8))) value - TabSeparated - test_function.py + JSONEachRow + aggregate_funnel_test.py + + + + executable + aggregate_funnel_trends + Array(Tuple(DateTime, Int8, Nullable(String))) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), Nullable(DateTime), Nullable(String), Array(Int8))) + value + + JSONEachRow + aggregate_funnel_trends.py + + + + executable + aggregate_funnel_array_trends + + Array(Tuple(DateTime, Int8, Array(String))) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) + value + + JSONEachRow + aggregate_funnel_trends.py + + + + executable + aggregate_funnel_cohort_trends + + Array(Tuple(DateTime, Int8, UInt64)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), Nullable(DateTime), UInt64, Array(Int8))) + value + + JSONEachRow + aggregate_funnel_trends.py + + + + executable + aggregate_funnel_array_trends_test + String + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) + value + + JSONEachRow + aggregate_funnel_array_trends_test.py \ No newline at end of file diff --git a/ee/api/billing.py b/ee/api/billing.py index a5a8657ef6360..eecc838f650f7 100644 --- a/ee/api/billing.py +++ b/ee/api/billing.py @@ -7,7 +7,7 @@ from django.http import HttpResponse from django.shortcuts import redirect from rest_framework import serializers, status, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import NotFound, PermissionDenied, ValidationError from rest_framework.request import Request from rest_framework.response import Response diff --git a/ee/api/integration.py b/ee/api/integration.py index 8386e4271a126..3c155f54e241b 100644 --- a/ee/api/integration.py +++ b/ee/api/integration.py @@ -1,7 +1,7 @@ from typing import Any from rest_framework import viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import AuthenticationFailed from rest_framework.request import Request from rest_framework.response import Response diff --git a/ee/api/test/test_hooks.py b/ee/api/test/test_hooks.py index 2870b2ecce10b..0017079f4a77d 100644 --- a/ee/api/test/test_hooks.py +++ b/ee/api/test/test_hooks.py @@ -4,6 +4,7 @@ from ee.api.hooks import valid_domain from ee.api.test.base import APILicensedTest from ee.models.hook import Hook +from hogvm.python.operation import HOGQL_BYTECODE_VERSION from posthog.models.action.action import Action from posthog.models.hog_functions.hog_function import HogFunction from posthog.test.base import ClickhouseTestMixin @@ -113,7 +114,7 @@ def test_create_hog_function_via_hook(self): assert hog_function.filters == { "actions": [{"id": 
str(self.action.id), "name": "", "type": "actions", "order": 0}], - "bytecode": ["_h", 32, "$pageview", 32, "event", 1, 1, 11, 3, 1, 4, 1], + "bytecode": ["_H", HOGQL_BYTECODE_VERSION, 32, "$pageview", 32, "event", 1, 1, 11, 3, 1, 4, 1], } assert hog_function.inputs == { @@ -142,7 +143,8 @@ def test_create_hog_function_via_hook(self): "debug": {}, "hook": { "bytecode": [ - "_h", + "_H", + HOGQL_BYTECODE_VERSION, 32, "hooks/standard/1234/abcd", ], diff --git a/ee/clickhouse/models/test/test_action.py b/ee/clickhouse/models/test/test_action.py index 1c32ece5e1e94..7495d3eddc4e5 100644 --- a/ee/clickhouse/models/test/test_action.py +++ b/ee/clickhouse/models/test/test_action.py @@ -13,7 +13,7 @@ _create_event, _create_person, ) -from hogvm.python.operation import Operation as op, HOGQL_BYTECODE_IDENTIFIER as _H +from hogvm.python.operation import Operation as op, HOGQL_BYTECODE_IDENTIFIER as _H, HOGQL_BYTECODE_VERSION @dataclasses.dataclass @@ -289,6 +289,15 @@ def test_filter_with_hogql(self): action1.bytecode, [ _H, + HOGQL_BYTECODE_VERSION, + # event = 'insight viewed' + op.STRING, + "insight viewed", + op.STRING, + "event", + op.GET_GLOBAL, + 1, + op.EQ, # toInt(properties.filters_count) > 10 op.INTEGER, 10, @@ -298,18 +307,10 @@ def test_filter_with_hogql(self): "properties", op.GET_GLOBAL, 2, - op.CALL, + op.CALL_GLOBAL, "toInt", 1, op.GT, - # event = 'insight viewed' - op.STRING, - "insight viewed", - op.STRING, - "event", - op.GET_GLOBAL, - 1, - op.EQ, # and op.AND, 2, diff --git a/ee/clickhouse/views/experiments.py b/ee/clickhouse/views/experiments.py index 13ca4beb7f755..ffdbfcc16e428 100644 --- a/ee/clickhouse/views/experiments.py +++ b/ee/clickhouse/views/experiments.py @@ -3,7 +3,6 @@ from django.utils.timezone import now from rest_framework import serializers, viewsets -from rest_framework.decorators import action from rest_framework.exceptions import ValidationError from rest_framework.request import Request from rest_framework.response import Response @@ -24,6 +23,7 @@ from posthog.api.feature_flag import FeatureFlagSerializer, MinimalFeatureFlagSerializer from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import UserBasicSerializer +from posthog.api.utils import action from posthog.caching.insight_cache import update_cached_state from posthog.clickhouse.query_tagging import tag_queries from posthog.constants import INSIGHT_TRENDS diff --git a/ee/clickhouse/views/groups.py b/ee/clickhouse/views/groups.py index 950fa114fb9ca..3c20275b2de7b 100644 --- a/ee/clickhouse/views/groups.py +++ b/ee/clickhouse/views/groups.py @@ -5,7 +5,7 @@ from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter from rest_framework import mixins, request, response, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import NotFound, ValidationError from rest_framework.pagination import CursorPagination diff --git a/ee/clickhouse/views/insights.py b/ee/clickhouse/views/insights.py index e6adf49e7ff9e..6072ab2957bb1 100644 --- a/ee/clickhouse/views/insights.py +++ b/ee/clickhouse/views/insights.py @@ -1,6 +1,6 @@ from typing import Any -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.permissions import SAFE_METHODS, BasePermission from rest_framework.request import Request from rest_framework.response import Response diff --git a/ee/clickhouse/views/person.py b/ee/clickhouse/views/person.py index 
c956394ad23a5..750ce4980982c 100644 --- a/ee/clickhouse/views/person.py +++ b/ee/clickhouse/views/person.py @@ -1,7 +1,7 @@ from typing import Optional from rest_framework import request, response -from rest_framework.decorators import action +from posthog.api.utils import action from ee.clickhouse.queries.funnels.funnel_correlation_persons import ( FunnelCorrelationActors, diff --git a/ee/models/dashboard_privilege.py b/ee/models/dashboard_privilege.py index 40c48ec9ca9d2..4dde1f4d13e95 100644 --- a/ee/models/dashboard_privilege.py +++ b/ee/models/dashboard_privilege.py @@ -6,23 +6,21 @@ # We call models that grant a user access to some resource (which isn't a grouping of users) a "privilege" class DashboardPrivilege(UUIDModel): - dashboard: models.ForeignKey = models.ForeignKey( + dashboard = models.ForeignKey( "posthog.Dashboard", on_delete=models.CASCADE, related_name="privileges", related_query_name="privilege", ) - user: models.ForeignKey = models.ForeignKey( + user = models.ForeignKey( "posthog.User", on_delete=models.CASCADE, related_name="explicit_dashboard_privileges", related_query_name="explicit_dashboard_privilege", ) - level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( - choices=Dashboard.RestrictionLevel.choices - ) - added_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + level = models.PositiveSmallIntegerField(choices=Dashboard.RestrictionLevel.choices) + added_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) class Meta: constraints = [ diff --git a/ee/models/event_definition.py b/ee/models/event_definition.py index 408fbcec43cf5..fc172c4ac3c8f 100644 --- a/ee/models/event_definition.py +++ b/ee/models/event_definition.py @@ -11,11 +11,11 @@ class EnterpriseEventDefinition(EventDefinition): on_delete=models.SET_NULL, related_name="event_definitions", ) - description: models.TextField = models.TextField(blank=True, null=True, default="") - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + description = models.TextField(blank=True, null=True, default="") + updated_at = models.DateTimeField(auto_now=True) updated_by = models.ForeignKey("posthog.User", null=True, on_delete=models.SET_NULL, blank=True) - verified: models.BooleanField = models.BooleanField(default=False, blank=True) - verified_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) + verified = models.BooleanField(default=False, blank=True) + verified_at = models.DateTimeField(null=True, blank=True) verified_by = models.ForeignKey( "posthog.User", null=True, diff --git a/ee/models/explicit_team_membership.py b/ee/models/explicit_team_membership.py index 2e41101cb3b6c..35330a11bbb73 100644 --- a/ee/models/explicit_team_membership.py +++ b/ee/models/explicit_team_membership.py @@ -12,23 +12,21 @@ class Level(models.IntegerChoices): MEMBER = 1, "member" ADMIN = 8, "administrator" - team: models.ForeignKey = models.ForeignKey( + team = models.ForeignKey( "posthog.Team", on_delete=models.CASCADE, related_name="explicit_memberships", related_query_name="explicit_membership", ) - parent_membership: models.ForeignKey = models.ForeignKey( + parent_membership = models.ForeignKey( "posthog.OrganizationMembership", on_delete=models.CASCADE, related_name="explicit_team_memberships", related_query_name="explicit_team_membership", ) - level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( - 
default=Level.MEMBER, choices=Level.choices - ) - joined_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + level = models.PositiveSmallIntegerField(default=Level.MEMBER, choices=Level.choices) + joined_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) class Meta: constraints = [ diff --git a/ee/models/feature_flag_role_access.py b/ee/models/feature_flag_role_access.py index 003ede1b25e9e..867f2d562b944 100644 --- a/ee/models/feature_flag_role_access.py +++ b/ee/models/feature_flag_role_access.py @@ -2,20 +2,20 @@ class FeatureFlagRoleAccess(models.Model): - feature_flag: models.ForeignKey = models.ForeignKey( + feature_flag = models.ForeignKey( "posthog.FeatureFlag", on_delete=models.CASCADE, related_name="access", related_query_name="access", ) - role: models.ForeignKey = models.ForeignKey( + role = models.ForeignKey( "Role", on_delete=models.CASCADE, related_name="feature_flag_access", related_query_name="feature_flag_access", ) - added_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + added_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) class Meta: constraints = [models.UniqueConstraint(fields=["role", "feature_flag"], name="unique_feature_flag_and_role")] diff --git a/ee/models/license.py b/ee/models/license.py index 283d9fd708760..5a18d8f8c585d 100644 --- a/ee/models/license.py +++ b/ee/models/license.py @@ -43,12 +43,12 @@ def first_valid(self) -> Optional["License"]: class License(models.Model): objects: LicenseManager = LicenseManager() - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - plan: models.CharField = models.CharField(max_length=200) - valid_until: models.DateTimeField = models.DateTimeField() - key: models.CharField = models.CharField(max_length=200) + created_at = models.DateTimeField(auto_now_add=True) + plan = models.CharField(max_length=200) + valid_until = models.DateTimeField() + key = models.CharField(max_length=200) # DEPRECATED: This is no longer used - max_users: models.IntegerField = models.IntegerField(default=None, null=True) # None = no restriction + max_users = models.IntegerField(default=None, null=True) # None = no restriction # NOTE: Remember to update the Billing Service as well. Long-term it will be the source of truth. 
SCALE_PLAN = "scale" diff --git a/ee/models/organization_resource_access.py b/ee/models/organization_resource_access.py index 201cb354177f1..924b3e9db2855 100644 --- a/ee/models/organization_resource_access.py +++ b/ee/models/organization_resource_access.py @@ -19,20 +19,16 @@ class Resources(models.TextChoices): INSIGHTS = "insights", "insights" DASHBOARDS = "dashboards", "dashboards" - resource: models.CharField = models.CharField(max_length=32, choices=Resources.choices) - access_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( - default=AccessLevel.CAN_ALWAYS_EDIT, choices=AccessLevel.choices - ) - organization: models.ForeignKey = models.ForeignKey( - Organization, on_delete=models.CASCADE, related_name="resource_access" - ) - created_by: models.ForeignKey = models.ForeignKey( + resource = models.CharField(max_length=32, choices=Resources.choices) + access_level = models.PositiveSmallIntegerField(default=AccessLevel.CAN_ALWAYS_EDIT, choices=AccessLevel.choices) + organization = models.ForeignKey(Organization, on_delete=models.CASCADE, related_name="resource_access") + created_by = models.ForeignKey( "posthog.User", on_delete=models.SET_NULL, null=True, ) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) class Meta: constraints = [ diff --git a/ee/models/property_definition.py b/ee/models/property_definition.py index a93b8e957511e..bb9b34fa406e1 100644 --- a/ee/models/property_definition.py +++ b/ee/models/property_definition.py @@ -5,12 +5,12 @@ class EnterprisePropertyDefinition(PropertyDefinition): - description: models.TextField = models.TextField(blank=True, null=True, default="") - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + description = models.TextField(blank=True, null=True, default="") + updated_at = models.DateTimeField(auto_now=True) updated_by = models.ForeignKey("posthog.User", null=True, on_delete=models.SET_NULL, blank=True) - verified: models.BooleanField = models.BooleanField(default=False, blank=True) - verified_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) + verified = models.BooleanField(default=False, blank=True) + verified_at = models.DateTimeField(null=True, blank=True) verified_by = models.ForeignKey( "posthog.User", null=True, diff --git a/ee/models/role.py b/ee/models/role.py index 00880812b9d82..f37170818dbc3 100644 --- a/ee/models/role.py +++ b/ee/models/role.py @@ -5,19 +5,19 @@ class Role(UUIDModel): - name: models.CharField = models.CharField(max_length=200) - organization: models.ForeignKey = models.ForeignKey( + name = models.CharField(max_length=200) + organization = models.ForeignKey( "posthog.Organization", on_delete=models.CASCADE, related_name="roles", related_query_name="role", ) - feature_flags_access_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( + feature_flags_access_level = models.PositiveSmallIntegerField( default=OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT, choices=OrganizationResourceAccess.AccessLevel.choices, ) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - created_by: models.ForeignKey = models.ForeignKey( + created_at = models.DateTimeField(auto_now_add=True) + created_by = models.ForeignKey( "posthog.User", on_delete=models.SET_NULL, related_name="roles", @@ -30,29 +30,29 @@ class 
Meta: class RoleMembership(UUIDModel): - role: models.ForeignKey = models.ForeignKey( + role = models.ForeignKey( "Role", on_delete=models.CASCADE, related_name="roles", related_query_name="role", ) # TODO: Eventually remove this as we only need the organization membership - user: models.ForeignKey = models.ForeignKey( + user = models.ForeignKey( "posthog.User", on_delete=models.CASCADE, related_name="role_memberships", related_query_name="role_membership", ) - organization_member: models.ForeignKey = models.ForeignKey( + organization_member = models.ForeignKey( "posthog.OrganizationMembership", on_delete=models.CASCADE, related_name="role_memberships", related_query_name="role_membership", null=True, ) - joined_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + joined_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) class Meta: constraints = [models.UniqueConstraint(fields=["role", "user"], name="unique_user_and_role")] diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py index 80d88ac0fa0e8..28d3353c0576f 100644 --- a/ee/session_recordings/session_recording_playlist.py +++ b/ee/session_recordings/session_recording_playlist.py @@ -7,7 +7,7 @@ from django_filters.rest_framework import DjangoFilterBackend from loginas.utils import is_impersonated_session from rest_framework import request, response, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import PermissionDenied from posthog.api.forbid_destroy_model import ForbidDestroyModel diff --git a/frontend/__snapshots__/components-cards-insight-card--insight-card--dark.png b/frontend/__snapshots__/components-cards-insight-card--insight-card--dark.png index 216ebe2998ef3..1cf7b477f91e7 100644 Binary files a/frontend/__snapshots__/components-cards-insight-card--insight-card--dark.png and b/frontend/__snapshots__/components-cards-insight-card--insight-card--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-card--insight-card--light.png b/frontend/__snapshots__/components-cards-insight-card--insight-card--light.png index 210a97aab7b4d..c219df6b34aac 100644 Binary files a/frontend/__snapshots__/components-cards-insight-card--insight-card--light.png and b/frontend/__snapshots__/components-cards-insight-card--insight-card--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png index d5998a8ecbbb1..00e36315461fe 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png index a4576ed02f067..914a0a7bdc359 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--dark.png b/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--dark.png 
new file mode 100644 index 0000000000000..3f02eba67f4b1 Binary files /dev/null and b/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--dark.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--light.png b/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--light.png new file mode 100644 index 0000000000000..97eeec8e58291 Binary files /dev/null and b/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--funnel-left-to-right-insight--dark.png b/frontend/__snapshots__/exporter-exporter--funnel-left-to-right-insight--dark.png index f736d569481b4..593d38dc63ae5 100644 Binary files a/frontend/__snapshots__/exporter-exporter--funnel-left-to-right-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--funnel-left-to-right-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--dark.png b/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--dark.png index 80f11e3545900..0ab5e4ed6406e 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--light.png b/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--light.png index e6eb173228a49..58585281416a5 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--light.png and b/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--user-paths-insight--dark.png b/frontend/__snapshots__/exporter-exporter--user-paths-insight--dark.png index acc74d32b1329..7cff26c475c50 100644 Binary files a/frontend/__snapshots__/exporter-exporter--user-paths-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--user-paths-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--user-paths-insight--light.png b/frontend/__snapshots__/exporter-exporter--user-paths-insight--light.png index 7a1a6ee0a0a6b..333981c508581 100644 Binary files a/frontend/__snapshots__/exporter-exporter--user-paths-insight--light.png and b/frontend/__snapshots__/exporter-exporter--user-paths-insight--light.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png index 030f1724e0f3e..4086ce4b6a27e 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png index bcf05c9abf360..648af90308438 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-base--dark.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-base--dark.png index f981e52556b18..4037a2ef634ce 100644 Binary files 
a/frontend/__snapshots__/posthog-3000-navigation--navigation-base--dark.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-base--dark.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-base--light.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-base--light.png index 1581d4a4edae4..cab4754531e0a 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-base--light.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-base--light.png differ diff --git a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png index 9692fb26aa097..cddffb75044b7 100644 Binary files a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png and b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png index ad0c127a33fac..82bcb0f5bb0ce 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png index b316da52833e9..09792a848277f 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png index 5b169f7d87354..1b8480c6221a3 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png index f40b8da7ada3e..e406fef79ef8e 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png differ diff --git a/frontend/__snapshots__/scenes-app-dashboards--edit--dark.png b/frontend/__snapshots__/scenes-app-dashboards--edit--dark.png index 02241c05b0510..fb7b9107acb31 100644 Binary files a/frontend/__snapshots__/scenes-app-dashboards--edit--dark.png and b/frontend/__snapshots__/scenes-app-dashboards--edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-dashboards--edit--light.png b/frontend/__snapshots__/scenes-app-dashboards--edit--light.png index ea51aea309fff..555d89d379f45 100644 Binary files a/frontend/__snapshots__/scenes-app-dashboards--edit--light.png and b/frontend/__snapshots__/scenes-app-dashboards--edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-dashboards--show--dark.png b/frontend/__snapshots__/scenes-app-dashboards--show--dark.png index 17acdf6a3854d..2c51c1dcbc24a 100644 Binary files a/frontend/__snapshots__/scenes-app-dashboards--show--dark.png and b/frontend/__snapshots__/scenes-app-dashboards--show--dark.png differ diff --git 
a/frontend/__snapshots__/scenes-app-dashboards--show--light.png b/frontend/__snapshots__/scenes-app-dashboards--show--light.png index 7b2cf09f9f27c..0a4ea86b18c4a 100644 Binary files a/frontend/__snapshots__/scenes-app-dashboards--show--light.png and b/frontend/__snapshots__/scenes-app-dashboards--show--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png index ed2989ab6bc20..d254383069a86 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light--webkit.png index 61dad0d0766e4..1940933f16bce 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light.png b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light.png index 243c1cea204e1..ebf51524e67c1 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--stickiness-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark--webkit.png index 64d99b712d842..335b16fdfc3ce 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark.png index 39fab033ecbe3..f2ab6d8018611 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light--webkit.png index 2f87102a89afe..b5ef01afd3144 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light.png index 57a0cc2eaca3c..1ecf743c0a675 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark--webkit.png index 55bf81ce188f4..0b30896b49b77 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark--webkit.png and 
b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light--webkit.png index 4dcd20b4b0385..b11f8da52af3e 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light.png index fab546748b274..4494a987d7629 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-bar-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark--webkit.png index e5b89d0605c8e..ea5bb505ae65a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark.png index 0fd320603c08f..73ff3eb4b06d2 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light--webkit.png index 7679dbac61976..f73c42037b141 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light.png index c386f35d6f590..68556c8a2300d 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png index 972045ad6f932..086476ba716c0 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light.png index 97cafb52341a0..1072316bf304a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark--webkit.png index f84ffcfeca674..b6fd3facddbab 100644 Binary files 
a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark.png index 0683b6ec5b810..d536ad9eb5cac 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light--webkit.png index 2b3e77629b9ff..e8f32791910e1 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light.png index 560130a8d43fa..399f128c91764 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark--webkit.png index bde049dababeb..756951deb6614 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light--webkit.png index 329801d59fc36..1097be4f08f67 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light.png index 165d63d09a6b5..dc241e8086113 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-pie-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light.png index 5095d619040db..3e06506b96a9f 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark--webkit.png index 4fce62a2b668a..dd77d958263fb 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark.png 
b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark.png index 81bce266164f8..d40098ec1d9ec 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light--webkit.png index 32eecac9a1425..9ea4836236b93 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light.png index 39e4a572c854a..2cdbc00422d69 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-table-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light--webkit.png index c5a68841ec463..3ee3f6bedc6be 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light.png index a8178333d0764..e58a0ca460bc8 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light--webkit.png index 56160ec991fc9..6802705a2606f 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light.png b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light.png index aba24e56a0b36..a7c96c46245bd 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--trends-value-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-project-homepage--project-homepage--dark.png b/frontend/__snapshots__/scenes-app-project-homepage--project-homepage--dark.png index 6f0a54565311f..68560cb0bc84a 100644 Binary files a/frontend/__snapshots__/scenes-app-project-homepage--project-homepage--dark.png and b/frontend/__snapshots__/scenes-app-project-homepage--project-homepage--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-project-homepage--project-homepage--light.png b/frontend/__snapshots__/scenes-app-project-homepage--project-homepage--light.png index 9f5a2404bb23d..e125144a00562 100644 Binary files a/frontend/__snapshots__/scenes-app-project-homepage--project-homepage--light.png and b/frontend/__snapshots__/scenes-app-project-homepage--project-homepage--light.png differ diff --git 
a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png index 7c17549d8403c..e7c27e7ec2b9b 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--light.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--light.png index ada2bbf44246d..72d89788b7ef8 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--light.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-with-email--light.png differ diff --git a/frontend/src/exporter/Exporter.stories.tsx b/frontend/src/exporter/Exporter.stories.tsx index 00cc83dc74c57..7dba408f0cdd7 100644 --- a/frontend/src/exporter/Exporter.stories.tsx +++ b/frontend/src/exporter/Exporter.stories.tsx @@ -39,7 +39,6 @@ const Template: StoryFn = (props) => { ) } -Template.tags = ['test-skip'] // :FIXME: flaky tests, most likely due to resize observer changes export const TrendsLineInsight: Story = Template.bind({}) TrendsLineInsight.args = { insight: require('../mocks/fixtures/api/projects/team_id/insights/trendsLine.json') } @@ -202,6 +201,7 @@ StickinessInsight.tags = ['test-skip'] // doesn't produce a helpful reference im export const UserPathsInsight: Story = Template.bind({}) UserPathsInsight.args = { insight: require('../mocks/fixtures/api/projects/team_id/insights/userPaths.json') } +UserPathsInsight.tags = ['test-skip'] // FIXME: flaky tests, most likely due to resize observer changes export const Dashboard: Story = Template.bind({}) Dashboard.args = { dashboard } diff --git a/frontend/src/initKea.ts b/frontend/src/initKea.ts index 5117138b0446a..d3051cb01a06d 100644 --- a/frontend/src/initKea.ts +++ b/frontend/src/initKea.ts @@ -118,7 +118,28 @@ export function initKea({ routerHistory, routerLocation, beforePlugins }: InitKe plugins.push(loggerPlugin) } + if ((window as any).__REDUX_DEVTOOLS_EXTENSION__) { + // eslint-disable-next-line no-console + console.log('NB Redux Dev Tools are disabled on PostHog. See: https://github.com/PostHog/posthog/issues/17482') + } + resetContext({ plugins: plugins, + createStore: { + // Disable redux dev-tools's compose by passing `compose` from redux directly + compose: ((...funcs: any[]) => { + if (funcs.length === 0) { + return (arg: T) => arg + } + if (funcs.length === 1) { + return funcs[0] + } + return funcs.reduce( + (a, b) => + (...args: any) => + a(b(...args)) + ) + }) as any, + }, }) } diff --git a/frontend/src/layout/navigation-3000/navigationLogic.tsx b/frontend/src/layout/navigation-3000/navigationLogic.tsx index 4745b998e9814..79680e42e9023 100644 --- a/frontend/src/layout/navigation-3000/navigationLogic.tsx +++ b/frontend/src/layout/navigation-3000/navigationLogic.tsx @@ -399,7 +399,7 @@ export const navigation3000Logic = kea([ }, { identifier: Scene.PersonsManagement, - label: 'People', + label: 'People and groups', icon: , logic: isUsingSidebar ? personsAndGroupsSidebarLogic : undefined, to: isUsingSidebar ? 
undefined : urls.persons(), diff --git a/frontend/src/layout/navigation-3000/sidebars/insights.ts b/frontend/src/layout/navigation-3000/sidebars/insights.ts index f50cf71a2e9b5..c94b3df3ab23a 100644 --- a/frontend/src/layout/navigation-3000/sidebars/insights.ts +++ b/frontend/src/layout/navigation-3000/sidebars/insights.ts @@ -1,7 +1,5 @@ import { afterMount, connect, kea, listeners, path, reducers, selectors } from 'kea' import { subscriptions } from 'kea-subscriptions' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { deleteInsightWithUndo } from 'lib/utils/deleteWithUndo' import { insightsApi } from 'scenes/insights/utils/api' import { INSIGHTS_PER_PAGE, savedInsightsLogic } from 'scenes/saved-insights/savedInsightsLogic' @@ -28,8 +26,6 @@ export const insightsSidebarLogic = kea([ ['activeScene', 'sceneParams'], navigation3000Logic, ['searchTerm'], - featureFlagLogic, - ['featureFlags'], ], actions: [savedInsightsLogic, ['loadInsights', 'setSavedInsightsFilters', 'duplicateInsight']], })), @@ -49,10 +45,6 @@ export const insightsSidebarLogic = kea([ ], })), selectors(({ actions, values, cache }) => ({ - queryBasedInsightSaving: [ - (s) => [s.featureFlags], - (featureFlags) => !!featureFlags[FEATURE_FLAGS.QUERY_BASED_INSIGHTS_SAVING], - ], contents: [ (s) => [s.insights, s.infiniteInsights, s.insightsLoading, teamLogic.selectors.currentTeamId], (insights, infiniteInsights, insightsLoading, currentTeamId) => [ @@ -102,10 +94,6 @@ export const insightsSidebarLogic = kea([ object: insight, endpoint: `projects/${currentTeamId}/insights`, callback: actions.loadInsights, - options: { - writeAsQuery: values.queryBasedInsightSaving, - readAsQuery: true, - }, }) }, status: 'danger', @@ -115,11 +103,7 @@ export const insightsSidebarLogic = kea([ }, ], onRename: async (newName) => { - const updatedItem = await insightsApi.update( - insight.id, - { name: newName }, - { writeAsQuery: values.queryBasedInsightSaving, readAsQuery: true } - ) + const updatedItem = await insightsApi.update(insight.id, { name: newName }) insightsModel.actions.renameInsightSuccess(updatedItem) }, } as BasicListItem diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx index 73c0873322d34..74e4653f2d776 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx @@ -100,7 +100,30 @@ const SupportFormBlock = ({ onCancel }: { onCancel: () => void }): JSX.Element = return (
-
+ + + Submit + + + Cancel + +
+
Avg support response times @@ -131,28 +154,6 @@ const SupportFormBlock = ({ onCancel }: { onCancel: () => void }): JSX.Element = ) })}
- - - Submit - - - Cancel -
) } diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 3cb03eb58169e..99cd61b959ef4 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -10,16 +10,19 @@ import { SavedSessionRecordingPlaylistsResult } from 'scenes/session-recordings/ import { getCurrentExporterData } from '~/exporter/exporterViewLogic' import { + AlertType, + AlertTypeWrite, DatabaseSerializedFieldType, ErrorTrackingGroup, QuerySchema, QueryStatusResponse, + RecordingsQuery, + RecordingsQueryResponse, RefreshType, } from '~/queries/schema' import { ActionType, ActivityScope, - AlertType, AppMetricsTotalsV2Response, AppMetricsV2RequestParams, AppMetricsV2Response, @@ -93,7 +96,6 @@ import { SessionRecordingPlaylistType, SessionRecordingSnapshotParams, SessionRecordingSnapshotResponse, - SessionRecordingsResponse, SessionRecordingType, SharingConfigurationType, SlackChannelType, @@ -730,12 +732,12 @@ class ApiRequest { } // # Alerts - public alerts(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('alerts') + public alerts(id: InsightModel['id'], teamId?: TeamType['id']): ApiRequest { + return this.insight(id, teamId).addPathComponent('alerts') } - public alert(id: AlertType['id'], teamId?: TeamType['id']): ApiRequest { - return this.alerts(teamId).addPathComponent(id) + public alert(id: AlertType['id'], insightId: InsightModel['id'], teamId?: TeamType['id']): ApiRequest { + return this.alerts(insightId, teamId).addPathComponent(id) } // Resource Access Permissions @@ -1609,6 +1611,9 @@ const api = { async list(): Promise> { return await new ApiRequest().pluginConfigs().get() }, + async migrate(id: PluginConfigTypeNew['id']): Promise { + return await new ApiRequest().pluginConfig(id).withAction('migrate').create() + }, async logs(pluginConfigId: number, params: LogEntryRequestParams): Promise { const levels = (params.level?.split(',') ?? 
[]).filter((x) => x !== 'WARNING') const response = await new ApiRequest() @@ -1747,7 +1752,7 @@ const api = { }, recordings: { - async list(params: Record): Promise { + async list(params: RecordingsQuery): Promise { return await new ApiRequest().recordings().withQueryString(toParams(params)).get() }, async getMatchingEvents(params: string): Promise<{ results: string[] }> { @@ -1835,7 +1840,7 @@ const api = { async listPlaylistRecordings( playlistId: SessionRecordingPlaylistType['short_id'], params: Record = {} - ): Promise { + ): Promise { return await new ApiRequest() .recordingPlaylist(playlistId) .withAction('recordings') @@ -2261,20 +2266,20 @@ const api = { }, alerts: { - async get(alertId: AlertType['id']): Promise { - return await new ApiRequest().alert(alertId).get() + async get(insightId: number, alertId: AlertType['id']): Promise { + return await new ApiRequest().alert(alertId, insightId).get() }, - async create(data: Partial): Promise { - return await new ApiRequest().alerts().create({ data }) + async create(insightId: number, data: Partial): Promise { + return await new ApiRequest().alerts(insightId).create({ data }) }, - async update(alertId: AlertType['id'], data: Partial): Promise { - return await new ApiRequest().alert(alertId).update({ data }) + async update(insightId: number, alertId: AlertType['id'], data: Partial): Promise { + return await new ApiRequest().alert(alertId, insightId).update({ data }) }, async list(insightId: number): Promise> { - return await new ApiRequest().alerts().withQueryString(`insight=${insightId}`).get() + return await new ApiRequest().alerts(insightId).get() }, - async delete(alertId: AlertType['id']): Promise { - return await new ApiRequest().alert(alertId).delete() + async delete(insightId: number, alertId: AlertType['id']): Promise { + return await new ApiRequest().alert(alertId, insightId).delete() }, }, diff --git a/frontend/src/lib/components/Alerts/AlertDeletionWarning.tsx b/frontend/src/lib/components/Alerts/AlertDeletionWarning.tsx index cf1e5d0ad492c..d3c798462e792 100644 --- a/frontend/src/lib/components/Alerts/AlertDeletionWarning.tsx +++ b/frontend/src/lib/components/Alerts/AlertDeletionWarning.tsx @@ -7,8 +7,16 @@ import { alertsLogic } from './alertsLogic' export function AlertDeletionWarning(): JSX.Element | null { const { insightProps, insight } = useValues(insightLogic) + if (!insight?.short_id) { + return null + } + const { shouldShowAlertDeletionWarning } = useValues( - alertsLogic({ insightShortId: insight.short_id!, insightLogicProps: insightProps }) + alertsLogic({ + insightShortId: insight.short_id, + insightId: insight.id as number, + insightLogicProps: insightProps, + }) ) if (!shouldShowAlertDeletionWarning || !insight.short_id) { diff --git a/frontend/src/lib/components/Alerts/AlertsModal.tsx b/frontend/src/lib/components/Alerts/AlertsModal.tsx index d1ad9d9baf722..22e57b1112b82 100644 --- a/frontend/src/lib/components/Alerts/AlertsModal.tsx +++ b/frontend/src/lib/components/Alerts/AlertsModal.tsx @@ -1,4 +1,4 @@ -import { LemonButton, LemonButtonWithDropdown } from '@posthog/lemon-ui' +import { LemonButton } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { router } from 'kea-router' import { FEATURE_FLAGS } from 'lib/constants' @@ -17,11 +17,11 @@ import { ManageAlerts } from './views/ManageAlerts' export interface AlertsModalProps extends AlertsLogicProps { isOpen: boolean closeModal: () => void - alertId: number | 'new' | null + alertId?: string | null } export function 
AlertsModal(props: AlertsModalProps): JSX.Element { - const { closeModal, insightShortId, insightLogicProps, alertId, isOpen } = props + const { closeModal, insightId, insightShortId, insightLogicProps, alertId, isOpen } = props const { push } = useActions(router) const { userLoading } = useValues(userLogic) @@ -32,14 +32,16 @@ export function AlertsModal(props: AlertsModalProps): JSX.Element { {!alertId ? ( push(urls.alert(insightShortId, id.toString()))} + onSelect={(id) => push(urls.alert(insightShortId, id ?? 'new'))} /> ) : ( push(urls.alerts(insightShortId))} @@ -62,36 +64,19 @@ export function AlertsButton({ insight }: AlertsButtonProps): JSX.Element { if (!showAlerts) { return <> } - if (!areAlertsSupportedForInsight(insight.query)) { - return ( - - Alerts - - ) - } + return ( - push(urls.alerts(insight.short_id!))} fullWidth - dropdown={{ - actionable: true, - closeParentPopoverOnClickInside: true, - placement: 'right-start', - overlay: ( - <> - push(urls.alert(insight.short_id!, 'new'))} fullWidth> - New alert - - push(urls.alerts(insight.short_id!))} fullWidth> - Manage alerts - - - ), - }} + disabledReason={ + !areAlertsSupportedForInsight(insight.query) + ? 'Insights are only available for trends represented as a number. Change the insight representation to add alerts.' + : undefined + } > - Alerts - + Manage alerts + ) } diff --git a/frontend/src/lib/components/Alerts/alertLogic.ts b/frontend/src/lib/components/Alerts/alertLogic.ts index 5887329a815ec..66b3fe483c5cd 100644 --- a/frontend/src/lib/components/Alerts/alertLogic.ts +++ b/frontend/src/lib/components/Alerts/alertLogic.ts @@ -1,38 +1,38 @@ -import { connect, kea, key, path, props } from 'kea' +import { connect, kea, key, listeners, path, props } from 'kea' import { forms } from 'kea-forms' import { loaders } from 'kea-loaders' import { router, urlToAction } from 'kea-router' import api from 'lib/api' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' -import { isEmail } from 'lib/utils' -import { getInsightId } from 'scenes/insights/utils' import { urls } from 'scenes/urls' -import { AlertType } from '~/types' +import { AlertType, AlertTypeWrite } from '~/queries/schema' import type { alertLogicType } from './alertLogicType' import { alertsLogic, AlertsLogicProps } from './alertsLogic' export interface AlertLogicProps extends AlertsLogicProps { - id: number | 'new' + id?: string } export const alertLogic = kea([ path(['lib', 'components', 'Alerts', 'alertLogic']), props({} as AlertLogicProps), - key(({ id, insightShortId }) => `${insightShortId}-${id ?? 'new'}`), + key(({ id, insightId }) => `${insightId}-${id ?? 'new'}`), connect(() => ({ - actions: [alertsLogic, ['loadAlerts']], + actions: [alertsLogic, ['loadAlerts'], router, ['push']], })), loaders(({ props }) => ({ alert: { __default: undefined as unknown as AlertType, loadAlert: async () => { - if (props.id && props.id !== 'new') { - return await api.alerts.get(props.id) + if (props.id) { + return await api.alerts.get(props.insightId, props.id) + } + return { + enabled: true, } - return { anomaly_condition: { absoluteThreshold: {} } } }, }, })), @@ -40,40 +40,43 @@ export const alertLogic = kea([ forms(({ props, actions }) => ({ alert: { defaults: {} as unknown as AlertType, - errors: ({ name, target_value }) => ({ + errors: ({ name }) => ({ name: !name ? 'You need to give your alert a name' : undefined, - target_value: !target_value - ? 'This field is required.' - : !target_value.split(',').every((email) => isEmail(email)) - ? 
'All emails must be valid' - : undefined, }), submit: async (alert) => { - const insightId = await getInsightId(props.insightShortId) - - const payload = { + const payload: AlertTypeWrite = { ...alert, - insight: insightId, + subscribed_users: alert.subscribed_users?.map(({ id }) => id), + insight: props.insightId, } - const updatedAlert: AlertType = - props.id === 'new' ? await api.alerts.create(payload) : await api.alerts.update(props.id, payload) + try { + const updatedAlert: AlertType = !props.id + ? await api.alerts.create(props.insightId, payload) + : await api.alerts.update(props.insightId, props.id, payload) - actions.resetAlert() - - if (updatedAlert.id !== props.id) { - router.actions.replace(urls.alerts(props.insightShortId)) - } + actions.resetAlert() - actions.loadAlerts() - actions.loadAlertSuccess(updatedAlert) - lemonToast.success(`Alert saved.`) + actions.loadAlerts() + actions.loadAlertSuccess(updatedAlert) + lemonToast.success(`Alert saved.`) - return updatedAlert + return updatedAlert + } catch (error: any) { + const field = error.data?.attr?.replace(/_/g, ' ') + lemonToast.error(`Error saving alert: ${field}: ${error.detail}`) + throw error + } }, }, })), + listeners(({ props }) => ({ + submitAlertSuccess: () => { + router.actions.push(urls.alerts(props.insightShortId)) + }, + })), + urlToAction(({ actions }) => ({ '/*/*/alerts/:id': () => { actions.loadAlert() diff --git a/frontend/src/lib/components/Alerts/alertsLogic.ts b/frontend/src/lib/components/Alerts/alertsLogic.ts index 3e24f5daa07d9..467d7c616b42a 100644 --- a/frontend/src/lib/components/Alerts/alertsLogic.ts +++ b/frontend/src/lib/components/Alerts/alertsLogic.ts @@ -2,14 +2,15 @@ import { actions, afterMount, connect, kea, key, listeners, path, props, reducer import { loaders } from 'kea-loaders' import api from 'lib/api' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' -import { getInsightId } from 'scenes/insights/utils' +import { AlertType } from '~/queries/schema' import { isInsightVizNode, isTrendsQuery } from '~/queries/utils' -import { AlertType, ChartDisplayType, InsightLogicProps, InsightShortId } from '~/types' +import { ChartDisplayType, InsightLogicProps, InsightShortId } from '~/types' import type { alertsLogicType } from './alertsLogicType' export interface AlertsLogicProps { + insightId: number insightShortId: InsightShortId insightLogicProps: InsightLogicProps } @@ -27,9 +28,9 @@ export const areAlertsSupportedForInsight = (query?: Record | null) export const alertsLogic = kea([ path(['lib', 'components', 'Alerts', 'alertsLogic']), props({} as AlertsLogicProps), - key(({ insightShortId }) => `insight-${insightShortId}`), + key(({ insightId }) => `insight-${insightId}`), actions({ - deleteAlert: (id: number) => ({ id }), + deleteAlert: (id: string) => ({ id }), setShouldShowAlertDeletionWarning: (show: boolean) => ({ show }), }), @@ -41,11 +42,7 @@ export const alertsLogic = kea([ alerts: { __default: [] as AlertType[], loadAlerts: async () => { - const insightId = await getInsightId(props.insightShortId) - if (!insightId) { - return [] - } - const response = await api.alerts.list(insightId) + const response = await api.alerts.list(props.insightId) return response.results }, }, @@ -63,9 +60,9 @@ export const alertsLogic = kea([ ], }), - listeners(({ actions, values }) => ({ + listeners(({ actions, values, props }) => ({ deleteAlert: async ({ id }) => { - await api.alerts.delete(id) + await api.alerts.delete(props.insightId, id) }, setQuery: ({ query }) => { if 
(values.alerts.length === 0 || areAlertsSupportedForInsight(query)) { diff --git a/frontend/src/lib/components/Alerts/views/EditAlert.tsx b/frontend/src/lib/components/Alerts/views/EditAlert.tsx index 46d0dfd627cc1..eac20a8703086 100644 --- a/frontend/src/lib/components/Alerts/views/EditAlert.tsx +++ b/frontend/src/lib/components/Alerts/views/EditAlert.tsx @@ -1,11 +1,17 @@ -import { LemonInput } from '@posthog/lemon-ui' +import { LemonCheckbox, LemonInput } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' +import { AlertStateIndicator } from 'lib/components/Alerts/views/ManageAlerts' +import { MemberSelectMultiple } from 'lib/components/MemberSelectMultiple' +import { TZLabel } from 'lib/components/TZLabel' +import { UserActivityIndicator } from 'lib/components/UserActivityIndicator/UserActivityIndicator' import { IconChevronLeft } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonModal } from 'lib/lemon-ui/LemonModal' +import { AlertType } from '~/queries/schema' + import { alertLogic, AlertLogicProps } from '../alertLogic' import { alertsLogic } from '../alertsLogic' @@ -14,16 +20,54 @@ interface EditAlertProps extends AlertLogicProps { onDelete: () => void } +export function AlertState({ alert }: { alert: AlertType }): JSX.Element | null { + if (!alert.checks || alert.checks.length === 0) { + return null + } + + return ( +
+

+ Current status {alert.state === 'firing' ? 'firing' : 'not met'} + +

+ + + + + + + + + + + {alert.checks.map((check) => ( + + + + + + + ))} + +
Status Time Value Targets notified
{check.state === 'firing' ? 'Firing' : 'Not met'} + + {check.calculated_value}{check.targets_notified ? 'Yes' : 'No'}
+
+ ) +} + export function EditAlert(props: EditAlertProps): JSX.Element { const logic = alertLogic(props) const alertslogic = alertsLogic(props) const { alert, isAlertSubmitting, alertChanged } = useValues(logic) const { deleteAlert } = useActions(alertslogic) + const { setAlertValue } = useActions(logic) const id = props.id const _onDelete = (): void => { - if (id !== 'new') { + if (id) { deleteAlert(id) props.onDelete() } @@ -35,11 +79,11 @@ export function EditAlert(props: EditAlertProps): JSX.Element {
} onClick={props.onCancel} size="xsmall" /> -

{id === 'new' ? 'New' : 'Edit '} Alert

+

{!id ? 'New' : 'Edit '} Alert

- + {!alert ? (

Not found

@@ -47,42 +91,62 @@ export function EditAlert(props: EditAlertProps): JSX.Element {
) : ( <> - - - - - - - - - - - - - - - - - +
+ {alert?.created_by ? ( + + ) : null} + + + + + + + + + + u.id) ?? []} + idKey="id" + onChange={(value) => setAlertValue('subscribed_users', value)} + /> + + + + + + + + + + + +
+ )}
- {alert && id !== 'new' && ( + {alert && id && ( Delete alert @@ -92,7 +156,7 @@ export function EditAlert(props: EditAlertProps): JSX.Element { Cancel - {id === 'new' ? 'Create alert' : 'Save'} + {!id ? 'Create alert' : 'Save'} diff --git a/frontend/src/lib/components/Alerts/views/ManageAlerts.tsx b/frontend/src/lib/components/Alerts/views/ManageAlerts.tsx index 6f7c8f953ffa3..6d904f0133105 100644 --- a/frontend/src/lib/components/Alerts/views/ManageAlerts.tsx +++ b/frontend/src/lib/components/Alerts/views/ManageAlerts.tsx @@ -1,14 +1,26 @@ -import { IconEllipsis } from '@posthog/icons' +import { IconEllipsis, IconPause } from '@posthog/icons' import { useActions, useValues } from 'kea' +import { router } from 'kea-router' +import { IconPlayCircle } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonModal } from 'lib/lemon-ui/LemonModal' +import { LemonTag } from 'lib/lemon-ui/LemonTag' import { ProfileBubbles } from 'lib/lemon-ui/ProfilePicture' import { pluralize } from 'lib/utils' +import { urls } from 'scenes/urls' -import { AlertType } from '~/types' +import { AlertType } from '~/queries/schema' import { alertsLogic, AlertsLogicProps } from '../alertsLogic' +export function AlertStateIndicator({ alert }: { alert: AlertType }): JSX.Element { + return alert.state === 'firing' ? ( + + ) : ( + + ) +} + interface AlertListItemProps { alert: AlertType onClick: () => void @@ -16,15 +28,16 @@ interface AlertListItemProps { } export function AlertListItem({ alert, onClick, onDelete }: AlertListItemProps): JSX.Element { + const absoluteThreshold = alert.threshold?.configuration?.absoluteThreshold return ( : } sideAction={{ icon: , - dropdown: { overlay: ( <> @@ -45,9 +58,23 @@ export function AlertListItem({ alert, onClick, onDelete }: AlertListItemProps): >
-
{alert.name}
+
+ {alert.name} + {alert.enabled ? ( + <> + +
+ {absoluteThreshold?.lower && `Low ${absoluteThreshold.lower}`} + {absoluteThreshold?.lower && absoluteThreshold?.upper ? ' · ' : ''} + {absoluteThreshold?.upper && `High ${absoluteThreshold.upper}`} +
+ + ) : ( +
Disabled
+ )} +
- ({ email }))} /> + ({ email }))} />
) @@ -55,10 +82,11 @@ export function AlertListItem({ alert, onClick, onDelete }: AlertListItemProps): interface ManageAlertsProps extends AlertsLogicProps { onCancel: () => void - onSelect: (value: number | 'new') => void + onSelect: (value?: string) => void } export function ManageAlerts(props: ManageAlertsProps): JSX.Element { + const { push } = useActions(router) const logic = alertsLogic(props) const { alerts } = useValues(logic) @@ -67,15 +95,20 @@ export function ManageAlerts(props: ManageAlertsProps): JSX.Element { return ( <> -

Manage Alerts

+

+ Manage Alerts ALPHA +

+
+ With alerts, PostHog will monitor your insight and notify you when certain conditions are met. We do + not evaluate alerts in real time, but rather on an hourly schedule. Please note that + alerts are in alpha and may not be fully reliable. +
{alerts.length ? (
- {alerts?.length} - {' active '} - {pluralize(alerts.length || 0, 'alert', 'alerts', false)} + {alerts?.length} {pluralize(alerts.length || 0, 'alert', 'alerts', false)}
{alerts.map((alert) => ( @@ -97,6 +130,9 @@ export function ManageAlerts(props: ManageAlertsProps): JSX.Element { + push(urls.alert(props.insightShortId, 'new'))}> + New alert + Close diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.stories.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightCard.stories.tsx index b2e68d9bd6715..6ffbcf8f51e94 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.stories.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.stories.tsx @@ -17,7 +17,6 @@ import EXAMPLE_TRENDS_PIE from '../../../../mocks/fixtures/api/projects/team_id/ import EXAMPLE_TRENDS_TABLE from '../../../../mocks/fixtures/api/projects/team_id/insights/trendsTable.json' import EXAMPLE_TRENDS_HORIZONTAL_BAR from '../../../../mocks/fixtures/api/projects/team_id/insights/trendsValue.json' import EXAMPLE_TRENDS_WORLD_MAP from '../../../../mocks/fixtures/api/projects/team_id/insights/trendsWorldMap.json' -import EXAMPLE_PATHS from '../../../../mocks/fixtures/api/projects/team_id/insights/userPaths.json' import { InsightCard as InsightCardComponent } from './index' const examples = [ @@ -30,7 +29,6 @@ const examples = [ EXAMPLE_TRENDS_WORLD_MAP, EXAMPLE_FUNNEL, EXAMPLE_RETENTION, - EXAMPLE_PATHS, EXAMPLE_STICKINESS, EXAMPLE_LIFECYCLE, EXAMPLE_DATA_TABLE_NODE_HOGQL_QUERY, @@ -64,7 +62,6 @@ const meta: Meta = { control: { type: 'boolean' }, }, }, - tags: ['test-skip'], // :FIXME: flaky tests, most likely due to resize observer changes } export default meta export const InsightCard: Story = (args) => { diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx index 5cd43a5317597..833795d4f24b5 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx @@ -33,8 +33,6 @@ export interface InsightCardProps extends Resizeable, React.HTMLAttributes diff --git a/frontend/src/lib/components/MemberSelectMultiple.tsx b/frontend/src/lib/components/MemberSelectMultiple.tsx new file mode 100644 index 0000000000000..97900f97947b1 --- /dev/null +++ b/frontend/src/lib/components/MemberSelectMultiple.tsx @@ -0,0 +1,48 @@ +import { LemonInputSelect, ProfilePicture } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { fullName } from 'lib/utils' +import { useEffect } from 'react' +import { membersLogic } from 'scenes/organization/membersLogic' + +import { UserBasicType } from '~/types' + +type UserIdType = string | number + +export type MemberSelectMultipleProps = { + idKey: 'email' | 'uuid' | 'id' + value: UserIdType[] + onChange: (values: UserBasicType[]) => void +} + +export function MemberSelectMultiple({ idKey, value, onChange }: MemberSelectMultipleProps): JSX.Element { + const { filteredMembers, membersLoading } = useValues(membersLogic) + const { ensureAllMembersLoaded } = useActions(membersLogic) + + useEffect(() => { + ensureAllMembersLoaded() + }, []) + + const options = filteredMembers.map((member) => ({ + key: member.user[idKey].toString(), + label: fullName(member.user), + value: member.user[idKey], + icon: , + })) + + return ( + v.toString())} + loading={membersLoading} + onChange={(newValues: UserIdType[]) => { + const selectedUsers = filteredMembers.filter((member) => + newValues.includes(member.user[idKey].toString()) + ) + onChange(selectedUsers.map((member) => member.user)) + }} + mode="multiple" + options={options} + data-attr="subscribed-users" 
+ /> + ) +} diff --git a/frontend/src/lib/components/Playlist/Playlist.scss b/frontend/src/lib/components/Playlist/Playlist.scss index d297c5928cb0b..6c4a6d4b9954f 100644 --- a/frontend/src/lib/components/Playlist/Playlist.scss +++ b/frontend/src/lib/components/Playlist/Playlist.scss @@ -51,7 +51,7 @@ .SessionRecordingPlaylistHeightWrapper { // NOTE: Somewhat random way to offset the various headers and tabs above the playlist - height: calc(100vh - 14rem); + height: calc(100vh - 15rem); min-height: 25rem; } diff --git a/frontend/src/lib/components/PropertyFilters/utils.ts b/frontend/src/lib/components/PropertyFilters/utils.ts index 3febc534c7df8..2c022d570e9ed 100644 --- a/frontend/src/lib/components/PropertyFilters/utils.ts +++ b/frontend/src/lib/components/PropertyFilters/utils.ts @@ -19,6 +19,7 @@ import { FilterLogicalOperator, GroupPropertyFilter, HogQLPropertyFilter, + LogEntryPropertyFilter, PersonPropertyFilter, PropertyDefinitionType, PropertyFilterType, @@ -103,6 +104,7 @@ export const PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE: Record v === filterType)?.[0] as | PropertyFilterType | undefined diff --git a/frontend/src/lib/components/TaxonomicFilter/types.ts b/frontend/src/lib/components/TaxonomicFilter/types.ts index 6ee6afd304203..a496095b8471b 100644 --- a/frontend/src/lib/components/TaxonomicFilter/types.ts +++ b/frontend/src/lib/components/TaxonomicFilter/types.ts @@ -109,6 +109,7 @@ export enum TaxonomicFilterGroupType { SessionProperties = 'session_properties', HogQLExpression = 'hogql_expression', Notebooks = 'notebooks', + LogEntries = 'log_entries', // Misc Replay = 'replay', } diff --git a/frontend/src/lib/components/UniversalFilters/utils.ts b/frontend/src/lib/components/UniversalFilters/utils.ts index 923ca44767385..f8b63af80ce5e 100644 --- a/frontend/src/lib/components/UniversalFilters/utils.ts +++ b/frontend/src/lib/components/UniversalFilters/utils.ts @@ -1,4 +1,4 @@ -import { ActionFilter, FilterLogicalOperator, RecordingPropertyFilter } from '~/types' +import { ActionFilter, FilterLogicalOperator, LogEntryPropertyFilter, RecordingPropertyFilter } from '~/types' import { isCohortPropertyFilter } from '../PropertyFilters/utils' import { UniversalFiltersGroup, UniversalFiltersGroupValue, UniversalFilterValue } from './UniversalFilters' @@ -6,23 +6,21 @@ import { UniversalFiltersGroup, UniversalFiltersGroupValue, UniversalFilterValue export function isUniversalGroupFilterLike(filter?: UniversalFiltersGroupValue): filter is UniversalFiltersGroup { return filter?.type === FilterLogicalOperator.And || filter?.type === FilterLogicalOperator.Or } - export function isEntityFilter(filter: UniversalFilterValue): filter is ActionFilter { return isEventFilter(filter) || isActionFilter(filter) } - export function isEventFilter(filter: UniversalFilterValue): filter is ActionFilter { return filter.type === 'events' } - export function isActionFilter(filter: UniversalFilterValue): filter is ActionFilter { return filter.type === 'actions' } - export function isRecordingPropertyFilter(filter: UniversalFilterValue): filter is RecordingPropertyFilter { return filter.type === 'recording' } - +export function isLogEntryPropertyFilter(filter: UniversalFilterValue): filter is LogEntryPropertyFilter { + return filter.type === 'log_entry' +} export function isEditableFilter(filter: UniversalFilterValue): boolean { return isEntityFilter(filter) ? 
false : !isCohortPropertyFilter(filter) } diff --git a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts index cc8262a73e2f0..2107640885e26 100644 --- a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts +++ b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts @@ -72,11 +72,11 @@ describe('versionCheckerLogic', () => { it.each([ { versionCount: 1, expectation: null }, { - versionCount: 10, + versionCount: 11, expectation: { latestUsedVersion: '1.0.0', - latestAvailableVersion: '1.0.9', - numVersionsBehind: 9, + latestAvailableVersion: '1.0.10', + numVersionsBehind: 10, level: 'info', }, }, @@ -86,7 +86,7 @@ describe('versionCheckerLogic', () => { latestUsedVersion: '1.0.0', latestAvailableVersion: '1.0.14', numVersionsBehind: 14, - level: 'warning', + level: 'info', }, }, { @@ -127,12 +127,7 @@ describe('versionCheckerLogic', () => { { version: '1.9.0', timestamp: '2023-01-01T12:00:00Z' }, { version: '1.83.1', timestamp: '2023-01-01T10:00:00Z' }, ], - expectation: { - latestAvailableVersion: '1.84.0', - latestUsedVersion: '1.83.1', - level: 'info', - numVersionsBehind: 1, - }, + expectation: null, }, { usedVersions: [ diff --git a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts index 4b08c9e3be675..3a86eedbd95a9 100644 --- a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts +++ b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts @@ -184,19 +184,20 @@ export const versionCheckerLogic = kea([ let level: 'warning' | 'info' | 'error' | undefined if (diff.kind === 'major' || numVersionsBehind >= 20) { level = 'error' - } else if ((diff.kind === 'minor' && diff.diff >= 5) || numVersionsBehind >= 10) { + } else if (diff.kind === 'minor' && diff.diff >= 15) { level = 'warning' - } else if (diff.kind === 'minor' || numVersionsBehind >= 5) { + } else if ((diff.kind === 'minor' && diff.diff >= 10) || numVersionsBehind >= 10) { level = 'info' } else if (latestUsedVersion.extra) { - // if we have an extra (alpha/beta/rc/etc) version, we should always show a warning if they aren't on the latest + // if we have an extra (alpha/beta/rc/etc.) 
version, we should always show a warning if they aren't on the latest level = 'warning' } else { // don't warn for a small number of patch versions behind level = undefined } - if (level) { + // we check if there is a "latest user version string" to avoid returning odd data in unexpected cases + if (level && !!versionToString(latestUsedVersion).trim().length) { warning = { latestUsedVersion: versionToString(latestUsedVersion), latestAvailableVersion: versionToString(latestAvailableVersion), diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index d9910233b794d..e33b6ee88bba7 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -162,8 +162,6 @@ export const FEATURE_FLAGS = { PRODUCT_SPECIFIC_ONBOARDING: 'product-specific-onboarding', // owner: @raquelmsmith REDIRECT_SIGNUPS_TO_INSTANCE: 'redirect-signups-to-instance', // owner: @raquelmsmith APPS_AND_EXPORTS_UI: 'apps-and-exports-ui', // owner: @benjackwhite - QUERY_BASED_DASHBOARD_CARDS: 'query-based-dashboard-cards', // owner: @thmsobrmlr - QUERY_BASED_INSIGHTS_SAVING: 'query-based-insights-saving', // owner: @thmsobrmlr HOGQL_DASHBOARD_ASYNC: 'hogql-dashboard-async', // owner: @webjunkie WEBHOOKS_DENYLIST: 'webhooks-denylist', // owner: #team-pipeline PIPELINE_UI: 'pipeline-ui', // owner: #team-pipeline diff --git a/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx b/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx index 870c0946279ab..71e743d5b6a57 100644 --- a/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx +++ b/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx @@ -20,13 +20,15 @@ export interface LemonMenuItemBase custom?: boolean } export interface LemonMenuItemNode extends LemonMenuItemBase { - items: (LemonMenuItemLeaf | false | null)[] + items: (LemonMenuItem | false | null)[] + placement?: LemonDropdownProps['placement'] keyboardShortcut?: never } export type LemonMenuItemLeaf = | (LemonMenuItemBase & { onClick: () => void items?: never + placement?: never keyboardShortcut?: KeyboardShortcut }) | (LemonMenuItemBase & { @@ -34,6 +36,7 @@ export type LemonMenuItemLeaf = disableClientSideRouting?: boolean targetBlank?: boolean items?: never + placement?: never keyboardShortcut?: KeyboardShortcut }) | (LemonMenuItemBase & { @@ -42,6 +45,7 @@ export type LemonMenuItemLeaf = disableClientSideRouting?: boolean targetBlank?: boolean items?: never + placement?: never keyboardShortcut?: KeyboardShortcut }) export interface LemonMenuItemCustom { @@ -52,6 +56,7 @@ export interface LemonMenuItemCustom { keyboardShortcut?: never /** True if the item is a custom element. */ custom?: boolean + placement?: never } export type LemonMenuItem = LemonMenuItemLeaf | LemonMenuItemCustom | LemonMenuItemNode @@ -243,7 +248,7 @@ interface LemonMenuItemButtonProps { const LemonMenuItemButton: FunctionComponent> = React.forwardRef( ( - { item: { label, items, keyboardShortcut, custom, ...buttonProps }, size, tooltipPlacement }, + { item: { label, items, placement, keyboardShortcut, custom, ...buttonProps }, size, tooltipPlacement }, ref ): JSX.Element => { const Label = typeof label === 'function' ? 
label : null @@ -272,7 +277,7 @@ const LemonMenuItemButton: FunctionComponent diff --git a/frontend/src/lib/lemon-ui/Link/Link.tsx b/frontend/src/lib/lemon-ui/Link/Link.tsx index 08a37f4366add..a8ce49710e678 100644 --- a/frontend/src/lib/lemon-ui/Link/Link.tsx +++ b/frontend/src/lib/lemon-ui/Link/Link.tsx @@ -1,7 +1,6 @@ import './Link.scss' import clsx from 'clsx' -import { useActions } from 'kea' import { router } from 'kea-router' import { isExternalLink } from 'lib/utils' import { getCurrentTeamId } from 'lib/utils/getAppContext' @@ -96,8 +95,6 @@ export const Link: React.FC> = Reac href: typeof to === 'string' ? to : undefined, }) - const { openSidePanel } = useActions(sidePanelStateLogic) - const onClick = (event: React.MouseEvent): void => { if (event.metaKey || event.ctrlKey) { event.stopPropagation() @@ -111,8 +108,26 @@ export const Link: React.FC> = Reac return } - if (typeof to === 'string' && isPostHogComDocs(to)) { + const mountedSidePanelLogic = sidePanelStateLogic.findMounted() + + if (typeof to === 'string' && isPostHogComDocs(to) && mountedSidePanelLogic) { + // TRICKY: We do this instead of hooks as there is some weird cyclic issue in tests + const { sidePanelOpen } = mountedSidePanelLogic.values + const { openSidePanel } = mountedSidePanelLogic.actions + event.preventDefault() + + const target = event.currentTarget + const container = document.getElementsByTagName('main')[0] + const topBar = document.getElementsByClassName('TopBar3000')[0] + if (!sidePanelOpen && container.contains(target)) { + setTimeout(() => { + // Little delay to allow the rendering of the side panel + const y = container.scrollTop + target.getBoundingClientRect().top - topBar.clientHeight + container.scrollTo({ top: y }) + }, 50) + } + openSidePanel(SidePanelTab.Docs, to) return } diff --git a/frontend/src/lib/utils/deleteWithUndo.tsx b/frontend/src/lib/utils/deleteWithUndo.tsx index 0977a1e3d6bb8..7812a880edd6f 100644 --- a/frontend/src/lib/utils/deleteWithUndo.tsx +++ b/frontend/src/lib/utils/deleteWithUndo.tsx @@ -1,6 +1,5 @@ import { lemonToast } from '@posthog/lemon-ui' import api from 'lib/api' -import { getInsightModel, InsightsApiOptions } from 'scenes/insights/utils/api' import { QueryBasedInsightModel } from '~/types' @@ -40,7 +39,6 @@ export async function deleteWithUndo>({ * when given a query based insight */ export async function deleteInsightWithUndo({ undo = false, - options, ...props }: { undo?: boolean @@ -48,10 +46,9 @@ export async function deleteInsightWithUndo({ object: QueryBasedInsightModel idField?: keyof QueryBasedInsightModel callback?: (undo: boolean, object: QueryBasedInsightModel) => void - options: InsightsApiOptions }): Promise { await api.update(`api/${props.endpoint}/${props.object[props.idField || 'id']}`, { - ...getInsightModel(props.object, options.writeAsQuery), + ...props.object, deleted: !undo, }) props.callback?.(undo, props.object) @@ -66,7 +63,7 @@ export async function deleteInsightWithUndo({ ? 
undefined : { label: 'Undo', - action: () => deleteInsightWithUndo({ undo: true, options, ...props }), + action: () => deleteInsightWithUndo({ undo: true, ...props }), }, } ) diff --git a/frontend/src/lib/utils/eventUsageLogic.ts b/frontend/src/lib/utils/eventUsageLogic.ts index 63b62be00ee67..2e6db6758873d 100644 --- a/frontend/src/lib/utils/eventUsageLogic.ts +++ b/frontend/src/lib/utils/eventUsageLogic.ts @@ -3,7 +3,7 @@ import { BarStatus, ResultType } from 'lib/components/CommandBar/types' import { convertPropertyGroupToProperties, isGroupPropertyFilter, - isRecordingPropertyFilter, + isLogEntryPropertyFilter, isValidPropertyFilter, } from 'lib/components/PropertyFilters/utils' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' @@ -71,7 +71,9 @@ import type { eventUsageLogicType } from './eventUsageLogicType' export enum DashboardEventSource { LongPress = 'long_press', MoreDropdown = 'more_dropdown', - DashboardHeader = 'dashboard_header', + DashboardHeaderSaveDashboard = 'dashboard_header_save_dashboard', + DashboardHeaderDiscardChanges = 'dashboard_header_discard_changes', + DashboardHeaderExitFullscreen = 'dashboard_header_exit_fullscreen', Hotkey = 'hotkey', InputEnter = 'input_enter', Toast = 'toast', @@ -930,9 +932,7 @@ export const eventUsageLogic = kea([ const eventFilters = filterValues.filter(isEventFilter) const actionFilters = filterValues.filter(isActionFilter) const propertyFilters = filterValues.filter(isValidPropertyFilter) - const consoleLogFilters = propertyFilters - .filter(isRecordingPropertyFilter) - .filter((f) => ['console_log_level', 'console_log_query'].includes(f.key)) + const consoleLogFilters = propertyFilters.filter(isLogEntryPropertyFilter) const filterBreakdown = filters && defaultDurationFilter diff --git a/frontend/src/lib/utils/semver.test.ts b/frontend/src/lib/utils/semver.test.ts index 31bcc58cdf5c9..67ee07860fc7f 100644 --- a/frontend/src/lib/utils/semver.test.ts +++ b/frontend/src/lib/utils/semver.test.ts @@ -1,4 +1,4 @@ -import { highestVersion, lowestVersion, parseVersion, versionToString } from './semver' +import { createVersionChecker, highestVersion, lowestVersion, parseVersion, versionToString } from './semver' describe('semver', () => { describe('parseVersion', () => { @@ -52,4 +52,14 @@ describe('semver', () => { expect(versionToString({ major: 1 })).toEqual('1') }) }) + describe('createVersionChecker', () => { + it('should create a version checker that checks that a version is above or equal to a specified version', () => { + const isSupportedVersion = createVersionChecker('4.5.6') + expect(isSupportedVersion('1.2.3')).toEqual(false) + expect(isSupportedVersion('4.5.6')).toEqual(true) + expect(isSupportedVersion('4.5.7')).toEqual(true) + expect(isSupportedVersion('7.8.9')).toEqual(true) + expect(isSupportedVersion('4.5.6-alpha')).toEqual(false) + }) + }) }) diff --git a/frontend/src/lib/utils/semver.ts b/frontend/src/lib/utils/semver.ts index 5a8f4606d7247..79cf377c51584 100644 --- a/frontend/src/lib/utils/semver.ts +++ b/frontend/src/lib/utils/semver.ts @@ -106,3 +106,10 @@ export function versionToString(version: SemanticVersion): string { } return versionPart } + +export function createVersionChecker(requiredVersion: string | SemanticVersion) { + return (version: string | SemanticVersion): boolean => { + const diff = diffVersions(version, requiredVersion) + return !diff || diff.diff > 0 + } +} diff --git a/frontend/src/models/insightsModel.tsx b/frontend/src/models/insightsModel.tsx index 
976a952ea5f70..f72e48c4ff35c 100644 --- a/frontend/src/models/insightsModel.tsx +++ b/frontend/src/models/insightsModel.tsx @@ -1,9 +1,7 @@ import { LemonDialog, LemonInput } from '@posthog/lemon-ui' -import { actions, connect, kea, listeners, path, selectors } from 'kea' -import { FEATURE_FLAGS } from 'lib/constants' +import { actions, connect, kea, listeners, path } from 'kea' import { LemonField } from 'lib/lemon-ui/LemonField' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { insightsApi } from 'scenes/insights/utils/api' import { teamLogic } from 'scenes/teamLogic' @@ -13,7 +11,7 @@ import type { insightsModelType } from './insightsModelType' export const insightsModel = kea([ path(['models', 'insightsModel']), - connect({ values: [featureFlagLogic, ['featureFlags']], logic: [teamLogic] }), + connect({ logic: [teamLogic] }), actions(() => ({ renameInsight: (item: QueryBasedInsightModel) => ({ item }), renameInsightSuccess: (item: QueryBasedInsightModel) => ({ item }), @@ -25,13 +23,7 @@ export const insightsModel = kea([ insightIds, }), })), - selectors({ - queryBasedInsightSaving: [ - (s) => [s.featureFlags], - (featureFlags) => !!featureFlags[FEATURE_FLAGS.QUERY_BASED_INSIGHTS_SAVING], - ], - }), - listeners(({ actions, values }) => ({ + listeners(({ actions }) => ({ renameInsight: async ({ item }) => { LemonDialog.openForm({ title: 'Rename insight', @@ -45,11 +37,7 @@ export const insightsModel = kea([ insightName: (name) => (!name ? 'You must enter a name' : undefined), }, onSubmit: async ({ insightName }) => { - const updatedItem = await insightsApi.update( - item.id, - { name: insightName }, - { writeAsQuery: values.queryBasedInsightSaving, readAsQuery: true } - ) + const updatedItem = await insightsApi.update(item.id, { name: insightName }) lemonToast.success( <> Renamed insight from {item.name} to {insightName} @@ -60,10 +48,7 @@ export const insightsModel = kea([ }) }, duplicateInsight: async ({ item }) => { - const addedItem = await insightsApi.duplicate(item, { - writeAsQuery: values.queryBasedInsightSaving, - readAsQuery: true, - }) + const addedItem = await insightsApi.duplicate(item) actions.duplicateInsightSuccess(addedItem) lemonToast.success('Insight duplicated') diff --git a/frontend/src/queries/Query/Query.tsx b/frontend/src/queries/Query/Query.tsx index 3828f98f2e431..d5a4bb5755a99 100644 --- a/frontend/src/queries/Query/Query.tsx +++ b/frontend/src/queries/Query/Query.tsx @@ -1,5 +1,4 @@ import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { SpinnerOverlay } from 'lib/lemon-ui/Spinner' import { useEffect, useState } from 'react' import { HogDebug } from 'scenes/debug/HogDebug' @@ -38,14 +37,12 @@ export interface QueryProps { cachedResults?: AnyResponseType /** Disable any changes to the query */ readOnly?: boolean - /** Show a stale overlay */ - stale?: boolean /** Reduce UI elements to only show data */ embedded?: boolean } export function Query(props: QueryProps): JSX.Element | null { - const { query: propsQuery, setQuery: propsSetQuery, readOnly, stale, embedded } = props + const { query: propsQuery, setQuery: propsSetQuery, readOnly, embedded } = props const [localQuery, localSetQuery] = useState(propsQuery) useEffect(() => { @@ -139,7 +136,6 @@ export function Query(props: QueryProps): JSX.Element | null
) : null} - {stale && } {component} diff --git a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts index a0b3467f458df..79b3991486d5d 100644 --- a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts @@ -537,57 +537,52 @@ export const dataVisualizationLogic = kea([ }, ], tabularData: [ - (state) => [state.selectedTabularSeries, state.response, state.columns], - (selectedTabularSeries, response, columns): any[][] => { - if (!response || selectedTabularSeries === null || selectedTabularSeries.length === 0) { + (state) => [state.tabularColumns, state.response], + (tabularColumns, response): any[][] => { + if (!response || tabularColumns === null) { return [] } const data: any[] = response?.['results'] ?? response?.['result'] ?? [] return data.map((row: any[]) => { - return selectedTabularSeries.map((series) => { - if (!series) { - return null - } - - const column = columns.find((n) => n.name === series.name) + return tabularColumns.map((column) => { if (!column) { return null } - const value = row[column.dataIndex] + const value = row[column.column.dataIndex] - if (column.type.isNumerical) { + if (column.column.type.isNumerical) { try { if (value === null) { return value } - const multiplier = series.settings.formatting?.style === 'percent' ? 100 : 1 + const multiplier = column.settings?.formatting?.style === 'percent' ? 100 : 1 - if (series.settings.formatting?.decimalPlaces) { + if (column.settings?.formatting?.decimalPlaces) { return formatDataWithSettings( parseFloat( (parseFloat(value) * multiplier).toFixed( - series.settings.formatting.decimalPlaces + column.settings.formatting.decimalPlaces ) ), - series.settings + column.settings ) } const isInt = Number.isInteger(value) return formatDataWithSettings( isInt ? 
parseInt(value, 10) * multiplier : parseFloat(value) * multiplier, - series.settings + column.settings ) } catch { return 0 } } - return formatDataWithSettings(value, series.settings) + return formatDataWithSettings(value, column.settings) }) }) }, @@ -595,11 +590,11 @@ export const dataVisualizationLogic = kea([ tabularColumns: [ (state) => [state.selectedTabularSeries, state.response, state.columns], (selectedTabularSeries, response, columns): AxisSeries[] => { - if (!response || selectedTabularSeries === null || selectedTabularSeries.length === 0) { + if (!response) { return [] } - return selectedTabularSeries + const selectedColumns = (selectedTabularSeries || []) .map((series): AxisSeries | null => { if (!series) { return null @@ -617,6 +612,15 @@ export const dataVisualizationLogic = kea([ } }) .filter((series): series is AxisSeries => Boolean(series)) + + if (selectedColumns.length === 0) { + return columns.map((column) => ({ + column, + data: [], + settings: { formatting: { prefix: '', suffix: '' } }, + })) + } + return selectedColumns }, ], dataVisualizationProps: [() => [(_, props) => props], (props): DataVisualizationLogicProps => props], diff --git a/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts b/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts index 01b4cfadfd526..9a8f36c6cb548 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts @@ -10,7 +10,6 @@ import { InsightNodeKind, InsightQueryNode, LifecycleFilterLegacy, - Node, NodeKind, PathsFilterLegacy, RetentionFilterLegacy, @@ -22,14 +21,13 @@ import { isDataWarehouseNode, isEventsNode, isFunnelsQuery, - isInsightVizNode, isLifecycleQuery, isPathsQuery, isRetentionQuery, isStickinessQuery, isTrendsQuery, } from '~/queries/utils' -import { ActionFilter, EntityTypes, FilterType, InsightType, QueryBasedInsightModel } from '~/types' +import { ActionFilter, EntityTypes, FilterType, InsightType } from '~/types' type FilterTypeActionsAndEvents = { events?: ActionFilter[] @@ -123,28 +121,6 @@ const nodeKindToFilterKey: Record = { [NodeKind.LifecycleQuery]: 'lifecycleFilter', } -/** Returns a `query` or converted `filters` for a query based insight, - * depending on the feature flag. This is necessary as we want to - * transition to query based insights on the frontend, while the backend - * still has filter based insights (and or conversion function is frontend side). - * - * The feature flag can be enabled once we want to persist query based insights - * backend side. Once the flag is rolled out 100% this function becomes obsolete. 
- */ -export const getInsightFilterOrQueryForPersistance = ( - insight: QueryBasedInsightModel, - queryBasedInsightSavingFlag: boolean -): { filters: Partial | undefined; query: Node> | null | undefined } => { - let filters - let query - if (!queryBasedInsightSavingFlag && isInsightVizNode(insight.query)) { - filters = queryNodeToFilter(insight.query.source) - } else { - query = insight.query - } - return { filters, query } -} - export const queryNodeToFilter = (query: InsightQueryNode): Partial => { const filters: Partial = objectClean({ insight: nodeKindToInsightType[query.kind], diff --git a/frontend/src/queries/nodes/InsightViz/InsightViz.tsx b/frontend/src/queries/nodes/InsightViz/InsightViz.tsx index 7bbd78602377c..694c206dd1333 100644 --- a/frontend/src/queries/nodes/InsightViz/InsightViz.tsx +++ b/frontend/src/queries/nodes/InsightViz/InsightViz.tsx @@ -3,7 +3,7 @@ import './InsightViz.scss' import clsx from 'clsx' import { BindLogic, useValues } from 'kea' import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' -import React, { useState } from 'react' +import { useState } from 'react' import { insightLogic } from 'scenes/insights/insightLogic' import { insightSceneLogic } from 'scenes/insights/insightSceneLogic' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' @@ -78,9 +78,19 @@ export function InsightViz({ uniqueKey, query, setQuery, context, readOnly, embe const showingResults = query.showResults ?? true const isEmbedded = embedded || (query.embedded ?? false) - const Wrapper = ({ children }: { children: React.ReactElement }): JSX.Element => { - return isEmbedded ? <>{children} :
{children}
- } + const display = ( + + ) return ( @@ -98,20 +108,7 @@ export function InsightViz({ uniqueKey, query, setQuery, context, readOnly, embe {!readOnly && ( )} - - - - + {!isEmbedded ?
{display}
: display}
diff --git a/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx b/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx index 3c5748cba6dd6..f4516279a254c 100644 --- a/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx +++ b/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx @@ -19,7 +19,9 @@ import { queryNodeToFilter } from '../InsightQuery/utils/queryNodeToFilter' export function TrendsSeries(): JSX.Element | null { const { insightProps } = useValues(insightLogic) - const { querySource, isLifecycle, isStickiness, display, hasFormula } = useValues(insightVizDataLogic(insightProps)) + const { querySource, isLifecycle, isStickiness, display, hasFormula, series } = useValues( + insightVizDataLogic(insightProps) + ) const { updateQuerySource } = useActions(insightVizDataLogic(insightProps)) const { showGroupsOptions, groupsTaxonomicTypes } = useValues(groupsModel) @@ -88,6 +90,7 @@ export function TrendsSeries(): JSX.Element | null { TaxonomicFilterGroupType.Actions, TaxonomicFilterGroupType.DataWarehouse, ]} + hideDeleteBtn={series?.length === 1} /> ) diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index c6bd48628bd45..a77404a9ffd1a 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -1,18 +1,6 @@ { "$schema": "http://json-schema.org/draft-07/schema#", "definitions": { - "AbsoluteThreshold": { - "additionalProperties": false, - "properties": { - "lower": { - "type": ["number", "null"] - }, - "upper": { - "type": ["number", "null"] - } - }, - "type": "object" - }, "ActionsNode": { "additionalProperties": false, "properties": { @@ -195,14 +183,143 @@ "enum": ["numeric", "duration", "duration_ms", "percentage", "percentage_scaled"], "type": "string" }, - "AnomalyCondition": { + "AlertCheck": { "additionalProperties": false, "properties": { - "absoluteThreshold": { - "$ref": "#/definitions/AbsoluteThreshold" + "calculated_value": { + "type": "number" + }, + "created_at": { + "type": "string" + }, + "id": { + "type": "string" + }, + "state": { + "type": "string" + }, + "targets_notified": { + "type": "boolean" + } + }, + "required": ["id", "created_at", "calculated_value", "state", "targets_notified"], + "type": "object" + }, + "AlertCondition": { + "additionalProperties": false, + "type": "object" + }, + "AlertType": { + "additionalProperties": false, + "properties": { + "checks": { + "items": { + "$ref": "#/definitions/AlertCheck" + }, + "type": "array" + }, + "condition": { + "$ref": "#/definitions/AlertCondition" + }, + "created_at": { + "type": "string" + }, + "created_by": { + "$ref": "#/definitions/UserBasicType" + }, + "enabled": { + "type": "boolean" + }, + "id": { + "type": "string" + }, + "insight": { + "type": "number" + }, + "last_notified_at": { + "type": "string" + }, + "name": { + "type": "string" + }, + "state": { + "type": "string" + }, + "subscribed_users": { + "items": { + "$ref": "#/definitions/UserBasicType" + }, + "type": "array" + }, + "threshold": { + "additionalProperties": false, + "properties": { + "configuration": { + "$ref": "#/definitions/InsightThreshold" + } + }, + "required": ["configuration"], + "type": "object" + } + }, + "required": [ + "checks", + "condition", + "created_at", + "created_by", + "enabled", + "id", + "insight", + "last_notified_at", + "name", + "state", + "subscribed_users", + "threshold" + ], + "type": "object" + }, + "AlertTypeBase": { + "additionalProperties": false, + "properties": { + "condition": { + "$ref": "#/definitions/AlertCondition" + }, + "enabled": { 
+ "type": "boolean" + }, + "insight": { + "type": "number" + }, + "name": { + "type": "string" + } + }, + "required": ["name", "condition", "enabled", "insight"], + "type": "object" + }, + "AlertTypeWrite": { + "additionalProperties": false, + "properties": { + "condition": { + "$ref": "#/definitions/AlertCondition" + }, + "enabled": { + "type": "boolean" + }, + "insight": { + "type": "number" + }, + "name": { + "type": "string" + }, + "subscribed_users": { + "items": { + "type": "integer" + }, + "type": "array" } }, - "required": ["absoluteThreshold"], + "required": ["condition", "enabled", "insight", "name", "subscribed_users"], "type": "object" }, "AnyDataNode": { @@ -310,6 +427,9 @@ { "$ref": "#/definitions/RecordingPropertyFilter" }, + { + "$ref": "#/definitions/LogEntryPropertyFilter" + }, { "$ref": "#/definitions/GroupPropertyFilter" }, @@ -4742,6 +4862,10 @@ } ] }, + "HedgehogColorOptions": { + "enum": ["green", "red", "blue", "purple", "dark", "light", "sepia", "invert", "invert-hue", "greyscale"], + "type": "string" + }, "HogLanguage": { "enum": ["hog", "hogJson", "hogQL", "hogQLExpr", "hogTemplate"], "type": "string" @@ -4994,7 +5118,7 @@ "description": "HogQL Query Options are automatically set per team. However, they can be overriden in the query.", "properties": { "bounceRatePageViewMode": { - "enum": ["count_pageviews", "uniq_urls"], + "enum": ["count_pageviews", "uniq_urls", "uniq_page_screen_autocaptures"], "type": "string" }, "dataWarehouseEventsModifiers": { @@ -5035,7 +5159,7 @@ "type": "string" }, "propertyGroupsMode": { - "enum": ["enabled", "disabled"], + "enum": ["enabled", "disabled", "optimized"], "type": "string" }, "s3TableUseInvalidColumns": { @@ -5475,6 +5599,15 @@ "InsightShortId": { "type": "string" }, + "InsightThreshold": { + "additionalProperties": false, + "properties": { + "absoluteThreshold": { + "$ref": "#/definitions/InsightsThresholdAbsolute" + } + }, + "type": "object" + }, "InsightVizNode": { "additionalProperties": false, "properties": { @@ -5777,6 +5910,18 @@ "required": ["kind"], "type": "object" }, + "InsightsThresholdAbsolute": { + "additionalProperties": false, + "properties": { + "lower": { + "type": "number" + }, + "upper": { + "type": "number" + } + }, + "type": "object" + }, "IntervalType": { "enum": ["minute", "hour", "day", "week", "month"], "type": "string" @@ -5925,6 +6070,55 @@ "enum": ["new", "resurrecting", "returning", "dormant"], "type": "string" }, + "LogEntryPropertyFilter": { + "additionalProperties": false, + "properties": { + "key": { + "type": "string" + }, + "label": { + "type": "string" + }, + "operator": { + "$ref": "#/definitions/PropertyOperator" + }, + "type": { + "const": "log_entry", + "type": "string" + }, + "value": { + "$ref": "#/definitions/PropertyFilterValue" + } + }, + "required": ["key", "operator", "type"], + "type": "object" + }, + "MatchedRecording": { + "additionalProperties": false, + "properties": { + "events": { + "items": { + "$ref": "#/definitions/MatchedRecordingEvent" + }, + "type": "array" + }, + "session_id": { + "type": "string" + } + }, + "required": ["events"], + "type": "object" + }, + "MatchedRecordingEvent": { + "additionalProperties": false, + "properties": { + "uuid": { + "type": "string" + } + }, + "required": ["uuid"], + "type": "object" + }, "MathType": { "anyOf": [ { @@ -5944,6 +6138,32 @@ } ] }, + "MinimalHedgehogConfig": { + "additionalProperties": false, + "properties": { + "accessories": { + "items": { + "type": "string" + }, + "type": "array" + }, + "color": { + "anyOf": [ + { 
+ "$ref": "#/definitions/HedgehogColorOptions" + }, + { + "type": "null" + } + ] + }, + "use_as_profile": { + "type": "boolean" + } + }, + "required": ["use_as_profile", "color", "accessories"], + "type": "object" + }, "MultipleBreakdownOptions": { "additionalProperties": false, "properties": { @@ -5977,6 +6197,7 @@ "FunnelsActorsQuery", "FunnelCorrelationActorsQuery", "SessionsTimelineQuery", + "RecordingsQuery", "SessionAttributionExplorerQuery", "ErrorTrackingQuery", "DataTableNode", @@ -6266,8 +6487,40 @@ "required": ["key", "operator", "type"], "type": "object" }, + "PersonType": { + "additionalProperties": false, + "properties": { + "created_at": { + "type": "string" + }, + "distinct_ids": { + "items": { + "type": "string" + }, + "type": "array" + }, + "id": { + "type": "string" + }, + "is_identified": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "properties": { + "type": "object" + }, + "uuid": { + "type": "string" + } + }, + "required": ["distinct_ids", "properties"], + "type": "object" + }, "PersonsNode": { "additionalProperties": false, + "deprecated": "Use `ActorsQuery` instead.", "properties": { "cohort": { "type": "integer" @@ -6323,6 +6576,7 @@ "session", "cohort", "recording", + "log_entry", "group", "hogql", "data_warehouse", @@ -8039,14 +8293,6 @@ { "$ref": "#/definitions/DurationType" }, - { - "const": "console_log_level", - "type": "string" - }, - { - "const": "console_log_query", - "type": "string" - }, { "const": "snapshot_source", "type": "string" @@ -8074,6 +8320,112 @@ "required": ["key", "operator", "type"], "type": "object" }, + "RecordingsQuery": { + "additionalProperties": false, + "properties": { + "actions": { + "items": { + "type": "object" + }, + "type": "array" + }, + "console_log_filters": { + "items": { + "$ref": "#/definitions/LogEntryPropertyFilter" + }, + "type": "array" + }, + "date_from": { + "type": ["string", "null"] + }, + "date_to": { + "type": ["string", "null"] + }, + "events": { + "items": { + "type": "object" + }, + "type": "array" + }, + "filter_test_accounts": { + "type": "boolean" + }, + "having_predicates": { + "items": { + "$ref": "#/definitions/AnyPropertyFilter" + }, + "type": "array" + }, + "kind": { + "const": "RecordingsQuery", + "type": "string" + }, + "limit": { + "type": "integer" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "offset": { + "type": "integer" + }, + "operand": { + "$ref": "#/definitions/FilterLogicalOperator" + }, + "order": { + "anyOf": [ + { + "$ref": "#/definitions/DurationType" + }, + { + "const": "start_time", + "type": "string" + }, + { + "const": "console_error_count", + "type": "string" + } + ] + }, + "person_uuid": { + "type": "string" + }, + "properties": { + "items": { + "$ref": "#/definitions/AnyPropertyFilter" + }, + "type": "array" + }, + "response": { + "$ref": "#/definitions/RecordingsQueryResponse" + }, + "session_ids": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "required": ["kind", "order"], + "type": "object" + }, + "RecordingsQueryResponse": { + "additionalProperties": false, + "properties": { + "has_next": { + "type": "boolean" + }, + "results": { + "items": { + "$ref": "#/definitions/SessionRecordingType" + }, + "type": "array" + } + }, + "required": ["results", "has_next"], + "type": "object" + }, "RefreshType": { "anyOf": [ { @@ -8597,6 +8949,88 @@ "required": ["key", "operator", "type"], "type": "object" }, + "SessionRecordingType": { + 
"additionalProperties": false, + "properties": { + "active_seconds": { + "type": "number" + }, + "click_count": { + "type": "number" + }, + "console_error_count": { + "type": "number" + }, + "console_log_count": { + "type": "number" + }, + "console_warn_count": { + "type": "number" + }, + "distinct_id": { + "type": "string" + }, + "email": { + "type": "string" + }, + "end_time": { + "description": "When the recording ends in ISO format.", + "type": "string" + }, + "id": { + "type": "string" + }, + "inactive_seconds": { + "type": "number" + }, + "keypress_count": { + "type": "number" + }, + "matching_events": { + "description": "List of matching events. *", + "items": { + "$ref": "#/definitions/MatchedRecording" + }, + "type": "array" + }, + "mouse_activity_count": { + "description": "count of all mouse activity in the recording, not just clicks", + "type": "number" + }, + "person": { + "$ref": "#/definitions/PersonType" + }, + "recording_duration": { + "description": "Length of recording in seconds.", + "type": "number" + }, + "snapshot_source": { + "enum": ["web", "mobile", "unknown"], + "type": "string" + }, + "start_time": { + "description": "When the recording starts in ISO format.", + "type": "string" + }, + "start_url": { + "type": "string" + }, + "storage": { + "description": "Where this recording information was loaded from", + "enum": ["object_storage_lts", "object_storage"], + "type": "string" + }, + "summary": { + "type": "string" + }, + "viewed": { + "description": "Whether this recording has been viewed already.", + "type": "boolean" + } + }, + "required": ["id", "viewed", "recording_duration", "start_time", "end_time", "snapshot_source"], + "type": "object" + }, "SessionsTimelineQuery": { "additionalProperties": false, "properties": { @@ -8870,6 +9304,7 @@ "session_properties", "hogql_expression", "notebooks", + "log_entries", "replay" ], "type": "string" @@ -9235,6 +9670,35 @@ "required": ["results"], "type": "object" }, + "UserBasicType": { + "additionalProperties": false, + "properties": { + "distinct_id": { + "type": "string" + }, + "email": { + "type": "string" + }, + "first_name": { + "type": "string" + }, + "hedgehog_config": { + "$ref": "#/definitions/MinimalHedgehogConfig" + }, + "id": { + "type": "number" + }, + "is_email_verified": {}, + "last_name": { + "type": "string" + }, + "uuid": { + "type": "string" + } + }, + "required": ["distinct_id", "email", "first_name", "id", "uuid"], + "type": "object" + }, "VizSpecificOptions": { "additionalProperties": false, "description": "Chart specific rendering options. Use ChartRenderingMetadata for non-serializable values, e.g. 
onClick handlers", diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index a0ba5618fbbcc..ee85f362b7472 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -9,8 +9,10 @@ import { ChartDisplayCategory, ChartDisplayType, CountPerActorMathType, + DurationType, EventPropertyFilter, EventType, + FilterLogicalOperator, FilterType, FunnelsFilterType, GroupMathType, @@ -20,14 +22,17 @@ import { IntervalType, LifecycleFilterType, LifecycleToggle, + LogEntryPropertyFilter, PathsFilterType, PersonPropertyFilter, PropertyGroupFilter, PropertyMathType, RetentionFilterType, SessionPropertyFilter, + SessionRecordingType, StickinessFilterType, TrendsFilterType, + UserBasicType, } from '~/types' export { ChartDisplayCategory } @@ -63,6 +68,7 @@ export enum NodeKind { FunnelsActorsQuery = 'FunnelsActorsQuery', FunnelCorrelationActorsQuery = 'FunnelCorrelationActorsQuery', SessionsTimelineQuery = 'SessionsTimelineQuery', + RecordingsQuery = 'RecordingsQuery', SessionAttributionExplorerQuery = 'SessionAttributionExplorerQuery', ErrorTrackingQuery = 'ErrorTrackingQuery', @@ -202,9 +208,9 @@ export interface HogQLQueryModifiers { debug?: boolean s3TableUseInvalidColumns?: boolean personsJoinMode?: 'inner' | 'left' - bounceRatePageViewMode?: 'count_pageviews' | 'uniq_urls' + bounceRatePageViewMode?: 'count_pageviews' | 'uniq_urls' | 'uniq_page_screen_autocaptures' sessionTableVersion?: 'auto' | 'v1' | 'v2' - propertyGroupsMode?: 'enabled' | 'disabled' + propertyGroupsMode?: 'enabled' | 'disabled' | 'optimized' } export interface DataWarehouseEventsModifier { @@ -264,6 +270,29 @@ export interface HogQuery extends DataNode { code?: string } +export interface RecordingsQueryResponse { + results: SessionRecordingType[] + has_next: boolean +} + +export interface RecordingsQuery extends DataNode { + kind: NodeKind.RecordingsQuery + date_from?: string | null + date_to?: string | null + events?: FilterType['events'] + actions?: FilterType['actions'] + properties?: AnyPropertyFilter[] + console_log_filters?: LogEntryPropertyFilter[] + having_predicates?: AnyPropertyFilter[] // duration and snapshot_source filters + filter_test_accounts?: boolean + operand?: FilterLogicalOperator + session_ids?: string[] + person_uuid?: string + order: DurationType | 'start_time' | 'console_error_count' + limit?: integer + offset?: integer +} + export interface HogQLNotice { start?: integer end?: integer @@ -494,6 +523,9 @@ export interface EventsQuery extends DataNode { orderBy?: string[] } +/** + * @deprecated Use `ActorsQuery` instead. + */ export interface PersonsNode extends DataNode { kind: NodeKind.PersonsNode search?: string @@ -1685,11 +1717,47 @@ export interface DashboardFilter { properties?: AnyPropertyFilter[] | null } -export interface AbsoluteThreshold { - lower?: number | null - upper?: number | null +export interface InsightsThresholdAbsolute { + lower?: number + upper?: number +} + +export interface InsightThreshold { + absoluteThreshold?: InsightsThresholdAbsolute + // More types of thresholds or conditions can be added here } -export interface AnomalyCondition { - absoluteThreshold: AbsoluteThreshold +export interface AlertCondition { + // Conditions in addition to the separate threshold + // TODO: Think about things like relative thresholds, rate of change, etc. 
+} + +export interface AlertCheck { + id: string + created_at: string + calculated_value: number + state: string + targets_notified: boolean +} + +export interface AlertTypeBase { + name: string + condition: AlertCondition + enabled: boolean + insight: number +} + +export interface AlertTypeWrite extends AlertTypeBase { + subscribed_users: integer[] +} + +export interface AlertType extends AlertTypeBase { + id: string + subscribed_users: UserBasicType[] + threshold: { configuration: InsightThreshold } + created_by: UserBasicType + created_at: string + state: string + last_notified_at: string + checks: AlertCheck[] } diff --git a/frontend/src/scenes/billing/billing-utils.ts b/frontend/src/scenes/billing/billing-utils.ts index bd721c680a37d..a8f008c0958f5 100644 --- a/frontend/src/scenes/billing/billing-utils.ts +++ b/frontend/src/scenes/billing/billing-utils.ts @@ -92,7 +92,7 @@ export const convertAmountToUsage = ( if (!amount) { return 0 } - if (!productAndAddonTiers) { + if (!productAndAddonTiers || productAndAddonTiers.length === 0) { return 0 } diff --git a/frontend/src/scenes/billing/billingProductLogic.ts b/frontend/src/scenes/billing/billingProductLogic.ts index 709fc8cad20c4..777925d1a812f 100644 --- a/frontend/src/scenes/billing/billingProductLogic.ts +++ b/frontend/src/scenes/billing/billingProductLogic.ts @@ -163,7 +163,10 @@ export const billingProductLogic = kea([ return product.usage_key ? billing?.custom_limits_usd?.[product.usage_key] ?? null : null }, ], - hasCustomLimitSet: [(s) => [s.customLimitUsd], (customLimitUsd) => !!customLimitUsd && customLimitUsd >= 0], + hasCustomLimitSet: [ + (s) => [s.customLimitUsd], + (customLimitUsd) => (!!customLimitUsd || customLimitUsd === 0) && customLimitUsd >= 0, + ], currentAndUpgradePlans: [ (_s, p) => [p.product], (product) => { @@ -209,7 +212,11 @@ export const billingProductLogic = kea([ productAndAddonTiers, billing?.discount_percent ) - : convertAmountToUsage(`${customLimitUsd}`, productAndAddonTiers, billing?.discount_percent) + : convertAmountToUsage( + customLimitUsd ? `${customLimitUsd}` : '', + productAndAddonTiers, + billing?.discount_percent + ) : 0 }, ], diff --git a/frontend/src/scenes/dashboard/Dashboard.tsx b/frontend/src/scenes/dashboard/Dashboard.tsx index 4eee95898ee97..d6576eb4fe20b 100644 --- a/frontend/src/scenes/dashboard/Dashboard.tsx +++ b/frontend/src/scenes/dashboard/Dashboard.tsx @@ -117,9 +117,9 @@ function DashboardScene(): JSX.Element { > {[DashboardPlacement.Public].includes(placement) ? ( - ) : ( + ) : !(dashboardMode === DashboardMode.Edit) ? 
( - )} + ) : null} )} diff --git a/frontend/src/scenes/dashboard/DashboardEditBar.tsx b/frontend/src/scenes/dashboard/DashboardEditBar.tsx index 2dc6c481030d9..2fdf7c1c8767e 100644 --- a/frontend/src/scenes/dashboard/DashboardEditBar.tsx +++ b/frontend/src/scenes/dashboard/DashboardEditBar.tsx @@ -1,6 +1,4 @@ import { IconCalendar } from '@posthog/icons' -import { LemonButton, Popover } from '@posthog/lemon-ui' -import clsx from 'clsx' import { useActions, useValues } from 'kea' import { DateFilter } from 'lib/components/DateFilter/DateFilter' import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters' @@ -8,72 +6,55 @@ import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { dashboardLogic } from 'scenes/dashboard/dashboardLogic' import { groupsModel } from '~/models/groupsModel' +import { DashboardMode } from '~/types' export function DashboardEditBar(): JSX.Element { - const { dashboard, canEditDashboard, temporaryFilters, stale } = useValues(dashboardLogic) - const { setDates, setProperties, cancelTemporary, applyTemporary } = useActions(dashboardLogic) + const { dashboard, canEditDashboard, temporaryFilters, dashboardMode } = useValues(dashboardLogic) + const { setDates, setProperties, setDashboardMode } = useActions(dashboardLogic) const { groupsTaxonomicTypes } = useValues(groupsModel) - const isEditInProgress: boolean = canEditDashboard && stale const disabledReason = !canEditDashboard ? "You don't have permission to edit this dashboard" : undefined return ( - - - Cancel changes - - - Apply and save dashboard - - - } - placement="right" - showArrow - > -
+ { + if (dashboardMode !== DashboardMode.Edit) { + setDashboardMode(DashboardMode.Edit, null) + } + setDates(from_date, to_date) + }} + disabledReason={disabledReason} + makeLabel={(key) => ( + <> + + {key} + )} - > - ( - <> - - {key} - - )} - /> - -
-
+ /> + { + if (dashboardMode !== DashboardMode.Edit) { + setDashboardMode(DashboardMode.Edit, null) + } + setProperties(properties) + }} + pageKey={'dashboard_' + dashboard?.id} + propertyFilters={temporaryFilters.properties} + taxonomicGroupTypes={[ + TaxonomicFilterGroupType.EventProperties, + TaxonomicFilterGroupType.PersonProperties, + TaxonomicFilterGroupType.EventFeatureFlags, + ...groupsTaxonomicTypes, + TaxonomicFilterGroupType.Cohorts, + TaxonomicFilterGroupType.Elements, + TaxonomicFilterGroupType.HogQLExpression, + ]} + /> + ) } diff --git a/frontend/src/scenes/dashboard/DashboardHeader.tsx b/frontend/src/scenes/dashboard/DashboardHeader.tsx index 65ce4cdaeeb74..95bbb0c480e6a 100644 --- a/frontend/src/scenes/dashboard/DashboardHeader.tsx +++ b/frontend/src/scenes/dashboard/DashboardHeader.tsx @@ -120,19 +120,33 @@ export function DashboardHeader(): JSX.Element | null { setDashboardMode(null, DashboardEventSource.DashboardHeader)} - tabIndex={10} - disabled={dashboardLoading} - > - Done editing - + <> + + setDashboardMode(null, DashboardEventSource.DashboardHeaderDiscardChanges) + } + tabIndex={9} + > + Cancel + + + setDashboardMode(null, DashboardEventSource.DashboardHeaderSaveDashboard) + } + tabIndex={10} + disabled={dashboardLoading} + > + Save + + ) : dashboardMode === DashboardMode.Fullscreen ? ( setDashboardMode(null, DashboardEventSource.DashboardHeader)} + onClick={() => setDashboardMode(null, DashboardEventSource.DashboardHeaderExitFullscreen)} data-attr="dashboard-exit-presentation-mode" disabled={dashboardLoading} > diff --git a/frontend/src/scenes/dashboard/DashboardItems.tsx b/frontend/src/scenes/dashboard/DashboardItems.tsx index 552809a5e3214..fccd51a9885f5 100644 --- a/frontend/src/scenes/dashboard/DashboardItems.tsx +++ b/frontend/src/scenes/dashboard/DashboardItems.tsx @@ -26,7 +26,6 @@ export function DashboardItems(): JSX.Element { highlightedInsightId, refreshStatus, canEditDashboard, - stale, itemsLoading, } = useValues(dashboardLogic) const { @@ -139,7 +138,6 @@ export function DashboardItems(): JSX.Element { { logic.mount() }) - it('saving layouts with no provided tiles updates all tiles', async () => { + it('saving layouts creates api call with all tiles', async () => { + await expectLogic(logic).toFinishAllListeners() + jest.spyOn(api, 'update') await expectLogic(logic, () => { - logic.actions.saveLayouts() + logic.actions.updateFiltersAndLayouts() }).toFinishAllListeners() expect(api.update).toHaveBeenCalledWith(`api/projects/${MOCK_TEAM_ID}/dashboards/5`, { @@ -322,23 +324,11 @@ describe('dashboardLogic', () => { layouts: {}, }, ], - }) - }) - - it('saving layouts with provided tiles updates only those tiles', async () => { - jest.spyOn(api, 'update') - - await expectLogic(logic, () => { - logic.actions.saveLayouts([{ id: 1, layouts: { sm: {} as TileLayout, xs: {} as TileLayout } }]) - }).toFinishAllListeners() - - expect(api.update).toHaveBeenCalledWith(`api/projects/${MOCK_TEAM_ID}/dashboards/5`, { - tiles: [ - { - id: 1, - layouts: { sm: {} as TileLayout, xs: {} as TileLayout }, - }, - ], + filters: { + date_from: null, + date_to: null, + properties: [], + }, }) }) }) diff --git a/frontend/src/scenes/dashboard/dashboardLogic.tsx b/frontend/src/scenes/dashboard/dashboardLogic.tsx index 84f9e3ad5aa63..908b778ebb311 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.tsx +++ b/frontend/src/scenes/dashboard/dashboardLogic.tsx @@ -21,7 +21,7 @@ import { captureTimeToSeeData, currentSessionId, TimeToSeeDataPayload } from 'li import { lemonToast } 
from 'lib/lemon-ui/LemonToast/LemonToast' import { Link } from 'lib/lemon-ui/Link' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { clearDOMTextSelection, isAbortedRequest, isUserLoggedIn, shouldCancelQuery, toParams, uuid } from 'lib/utils' +import { clearDOMTextSelection, isAbortedRequest, shouldCancelQuery, toParams, uuid } from 'lib/utils' import { DashboardEventSource, eventUsageLogic } from 'lib/utils/eventUsageLogic' import { Layout, Layouts } from 'react-grid-layout' import { calculateLayouts } from 'scenes/dashboard/tileLayouts' @@ -137,13 +137,15 @@ async function getSingleInsight( dashboardId: number, queryId: string, refresh: RefreshType, - methodOptions?: ApiMethodOptions + methodOptions?: ApiMethodOptions, + filtersOverride?: DashboardFilter ): Promise { const apiUrl = `api/projects/${currentTeamId}/insights/${insight.id}/?${toParams({ refresh, from_dashboard: dashboardId, // needed to load insight in correct context client_query_id: queryId, session_id: currentSessionId(), + ...(filtersOverride ? { filters_override: filtersOverride } : {}), })}` const insightResponse: Response = await api.getResponse(apiUrl, methodOptions) const legacyInsight: InsightModel | null = await getJSONOrNull(insightResponse) @@ -169,12 +171,17 @@ export const dashboardLogic = kea([ actions({ loadDashboard: (payload: { refresh?: RefreshType - action: 'initial_load' | 'update' | 'refresh' | 'load_missing' | 'refresh_insights_on_filters_updated' + action: + | 'initial_load' + | 'update' + | 'refresh' + | 'load_missing' + | 'refresh_insights_on_filters_updated' + | 'preview' }) => payload, triggerDashboardUpdate: (payload) => ({ payload }), /** The current state in which the dashboard is being viewed, see DashboardMode. */ setDashboardMode: (mode: DashboardMode | null, source: DashboardEventSource | null) => ({ mode, source }), - saveLayouts: (tilesToSave: DashboardTileLayoutUpdatePayload[] = []) => ({ tilesToSave }), updateLayouts: (layouts: Layouts) => ({ layouts }), updateContainerWidth: (containerWidth: number, columns: number) => ({ containerWidth, columns }), updateTileColor: (tileId: number, color: string | null) => ({ tileId, color }), @@ -194,7 +201,7 @@ export const dashboardLogic = kea([ date_to, }), setProperties: (properties: AnyPropertyFilter[] | null) => ({ properties }), - setFilters: (filters: DashboardFilter) => ({ filters }), + setFiltersAndLayouts: (filters: DashboardFilter) => ({ filters }), setAutoRefresh: (enabled: boolean, interval: number) => ({ enabled, interval }), setRefreshStatus: (shortId: InsightShortId, loading = false, queued = false) => ({ shortId, loading, queued }), setRefreshStatuses: (shortIds: InsightShortId[], loading = false, queued = false) => ({ @@ -226,8 +233,7 @@ export const dashboardLogic = kea([ setInitialLoadResponseBytes: (responseBytes: number) => ({ responseBytes }), abortQuery: (payload: { dashboardQueryId: string; queryId: string; queryStartTime: number }) => payload, abortAnyRunningQuery: true, - applyTemporary: true, - cancelTemporary: true, + updateFiltersAndLayouts: true, }), loaders(({ actions, props, values }) => ({ @@ -240,12 +246,34 @@ export const dashboardLogic = kea([ await breakpoint(200) try { - const apiUrl = values.apiUrl(refresh || 'async') + const apiUrl = values.apiUrl( + refresh || 'async', + action === 'preview' ? 
values.temporaryFilters : undefined + ) const dashboardResponse: Response = await api.getResponse(apiUrl) const dashboard: DashboardType | null = await getJSONOrNull(dashboardResponse) actions.setInitialLoadResponseBytes(getResponseBytes(dashboardResponse)) + // don't update dashboard tile layouts if we're previewing + // we want to retain what the user has temporarily set + if (action === 'preview' && dashboard) { + const editModeTileLayouts: Record = {} + values.dashboard?.tiles.forEach((tile: DashboardTile) => { + editModeTileLayouts[tile.id] = tile.layouts + }) + + const tilesWithPreviousLayouts = dashboard.tiles.map((tile) => ({ + ...tile, + layouts: editModeTileLayouts?.[tile.id], + })) + + return getQueryBasedDashboard({ + ...dashboard, + tiles: tilesWithPreviousLayouts, + }) + } + return getQueryBasedDashboard(dashboard) } catch (error: any) { if (error.status === 404) { @@ -254,19 +282,27 @@ export const dashboardLogic = kea([ throw error } }, - updateFilters: async () => { + updateFiltersAndLayouts: async (_, breakpoint) => { actions.abortAnyRunningQuery() try { + const layoutsToUpdate = (values.dashboard?.tiles || []).map((tile) => ({ + id: tile.id, + layouts: tile.layouts, + })) + + breakpoint() + const dashboard: DashboardType = await api.update( `api/projects/${values.currentTeamId}/dashboards/${props.id}`, { filters: values.filters, + tiles: layoutsToUpdate, } ) return getQueryBasedDashboard(dashboard) } catch (e) { - lemonToast.error('Could not update dashboardFilters: ' + String(e)) + lemonToast.error('Could not update dashboard: ' + String(e)) return values.dashboard } }, @@ -299,6 +335,23 @@ export const dashboardLogic = kea([ return values.dashboard } }, + setDashboardMode: async ({ mode, source }) => { + if ( + mode === null && + source === DashboardEventSource.DashboardHeaderDiscardChanges && + values.dashboard?.tiles + ) { + // layout changes were discarded so need to reset to original state + const restoredTiles = values.dashboard?.tiles?.map((tile) => ({ + ...tile, + layouts: values.dashboardLayouts?.[tile.id], + })) + + values.dashboard.tiles = restoredTiles + } + + return values.dashboard + }, duplicateTile: async ({ tile }) => { try { const newTile = { ...tile } as Partial> @@ -340,6 +393,14 @@ export const dashboardLogic = kea([ ], })), reducers(({ props }) => ({ + _dashboardLoading: [ + false, + { + loadDashboard: () => true, + loadDashboardSuccess: () => false, + loadDashboardFailure: () => false, + }, + ], pageVisibility: [ true, { @@ -353,6 +414,24 @@ export const dashboardLogic = kea([ loadDashboardFailure: () => true, }, ], + dashboardLayouts: [ + {} as Record, + { + loadDashboardSuccess: (state, { dashboard, payload }) => { + // don't update dashboardLayouts if we're previewing + if (payload?.action === 'preview') { + return state + } + + const tileIdToLayouts: Record = {} + dashboard?.tiles.forEach((tile: DashboardTile) => { + tileIdToLayouts[tile.id] = tile.layouts + }) + + return tileIdToLayouts + }, + }, + ], temporaryFilters: [ { date_from: null, @@ -375,7 +454,7 @@ export const dashboardLogic = kea([ ...state, date_from: dashboard?.filters.date_from || null, date_to: dashboard?.filters.date_to || null, - properties: dashboard?.filters.properties || null, + properties: dashboard?.filters.properties || [], } : state, }, @@ -387,17 +466,22 @@ export const dashboardLogic = kea([ properties: null, } as DashboardFilter, { - setFilters: (state, { filters }) => ({ + setFiltersAndLayouts: (state, { filters }) => ({ ...state, ...filters, }), - 
loadDashboardSuccess: (state, { dashboard }) => + loadDashboardSuccess: (state, { dashboard, payload }) => dashboard ? { ...state, - date_from: dashboard?.filters.date_from || null, - date_to: dashboard?.filters.date_to || null, - properties: dashboard?.filters.properties || [], + // don't update filters if we're previewing + ...(payload?.action === 'preview' + ? {} + : { + date_from: dashboard?.filters.date_from || null, + date_to: dashboard?.filters.date_to || null, + properties: dashboard?.filters.properties || [], + }), } : state, }, @@ -647,9 +731,10 @@ export const dashboardLogic = kea([ apiUrl: [ () => [(_, props) => props.id], (id) => { - return (refresh?: RefreshType) => + return (refresh?: RefreshType, filtersOverride?: DashboardFilter) => `api/projects/${teamLogic.values.currentTeamId}/dashboards/${id}/?${toParams({ refresh, + filters_override: filtersOverride, })}` }, ], @@ -660,7 +745,7 @@ export const dashboardLogic = kea([ ], textTiles: [(s) => [s.tiles], (tiles) => tiles.filter((t) => !!t.text)], itemsLoading: [ - (s) => [s.dashboardLoading, s.refreshStatus], + (s) => [s._dashboardLoading, s.refreshStatus], (dashboardLoading, refreshStatus) => { return dashboardLoading || Object.values(refreshStatus).some((s) => s.loading || s.queued) }, @@ -777,7 +862,7 @@ export const dashboardLogic = kea([ }, ], breadcrumbs: [ - (s) => [s.dashboard, s.dashboardLoading, s.dashboardFailedToLoad], + (s) => [s.dashboard, s._dashboardLoading, s.dashboardFailedToLoad], (dashboard, dashboardLoading, dashboardFailedToLoad): Breadcrumb[] => [ { key: Scene.Dashboards, @@ -822,22 +907,6 @@ export const dashboardLogic = kea([ }) }, ], - stale: [ - (s) => [s.temporaryFilters, s.dashboard], - (temporaryFilters, dashboard) => { - const isDateFromStale = - !!(temporaryFilters.date_from || dashboard?.filters.date_from) && - temporaryFilters.date_from !== dashboard?.filters.date_from - const isDateToStale = - !!(temporaryFilters.date_to || dashboard?.filters.date_to) && - temporaryFilters.date_to !== dashboard?.filters.date_to - const isPropertiesStale = - !!(temporaryFilters.properties || dashboard?.filters.properties) && - JSON.stringify(temporaryFilters.properties) !== JSON.stringify(dashboard?.filters.properties) - - return isDateFromStale || isDateToStale || isPropertiesStale - }, - ], })), events(({ actions, cache, props }) => ({ afterMount: () => { @@ -878,7 +947,7 @@ export const dashboardLogic = kea([ }, })), listeners(({ actions, values, cache, props, sharedListeners }) => ({ - updateFiltersSuccess: () => { + updateFiltersAndLayoutsSuccess: () => { actions.loadDashboard({ action: 'update' }) }, setRefreshError: sharedListeners.reportRefreshTiming, @@ -913,25 +982,6 @@ export const dashboardLogic = kea([ actions.loadDashboard({ action: 'update' }) } }, - updateLayouts: () => { - actions.saveLayouts() - }, - saveLayouts: async ({ tilesToSave }, breakpoint) => { - await breakpoint(300) - if (!isUserLoggedIn()) { - // If user is anonymous (i.e. viewing a shared dashboard logged out), we don't save any layout changes. - return - } - const layoutsToUpdate = tilesToSave.length - ? 
tilesToSave - : (values.dashboard?.tiles || []).map((tile) => ({ id: tile.id, layouts: tile.layouts })) - - breakpoint() - - return await api.update(`api/projects/${values.currentTeamId}/dashboards/${props.id}`, { - tiles: layoutsToUpdate, - }) - }, moveToDashboardSuccess: ({ payload }) => { if (payload?.toDashboard === undefined || payload?.tile === undefined) { return @@ -1030,15 +1080,17 @@ export const dashboardLogic = kea([ let refreshesFinished = 0 const totalResponseBytes = 0 - // array of functions that reload each item + // array of functions that reload each insight const fetchItemFunctions = insightsToRefresh.map((insight) => async () => { - const queryId = `${dashboardQueryId}::${uuid()}` + // dashboard refresh or insight refresh will have been triggered first + // so we should have a query_id to poll for + const queryId = insight?.query_status?.id const queryStartTime = performance.now() try { breakpoint() - if (insight.query_status) { - await pollForResults(insight.query_status.id, false, methodOptions) + if (queryId) { + await pollForResults(queryId, false, methodOptions) const currentTeamId = values.currentTeamId // TODO: Check and remove - We get the insight again here to get everything in the right format (e.g. because of result vs results) const polledInsight = await getSingleInsight( @@ -1047,8 +1099,14 @@ export const dashboardLogic = kea([ dashboardId, queryId, 'force_cache', - methodOptions + methodOptions, + action === 'preview' ? values.temporaryFilters : undefined ) + + if (action === 'preview' && polledInsight!.dashboard_tiles) { + // if we're previewing, only update the insight on this dashboard + polledInsight!.dashboards = [dashboardId] + } dashboardsModel.actions.updateDashboardInsight(polledInsight!) actions.setRefreshStatus(insight.short_id) } @@ -1056,9 +1114,9 @@ export const dashboardLogic = kea([ if (isBreakpoint(e)) { cancelled = true } else if (shouldCancelQuery(e)) { - if (!cancelled) { + if (!cancelled && queryId) { // cancel all insight requests for this query in one go - actions.abortQuery({ dashboardQueryId: dashboardQueryId, queryId: queryId, queryStartTime }) + actions.abortQuery({ dashboardQueryId: dashboardQueryId, queryId, queryStartTime }) } if (isAbortedRequest(e)) { cancelled = true @@ -1101,14 +1159,29 @@ export const dashboardLogic = kea([ eventUsageLogic.actions.reportDashboardRefreshed(dashboardId, values.newestRefreshed) }, - setFilters: ({ filters: { date_from, date_to } }) => { - actions.updateFilters() + setFiltersAndLayouts: ({ filters: { date_from, date_to } }) => { + actions.updateFiltersAndLayouts() eventUsageLogic.actions.reportDashboardDateRangeChanged(date_from, date_to) eventUsageLogic.actions.reportDashboardPropertiesChanged() }, setDashboardMode: async ({ mode, source }) => { if (mode === DashboardMode.Edit) { clearDOMTextSelection() + lemonToast.info('Now editing the dashboard – save to persist changes') + } else if (mode === null) { + if (source === DashboardEventSource.DashboardHeaderDiscardChanges) { + // cancel edit mode changes + + // reset filters to that before previewing + actions.setDates(values.filters.date_from ?? null, values.filters.date_to ?? null) + actions.setProperties(values.filters.properties ?? 
null) + + // also reset layout to that we stored in dashboardLayouts + // this is done in the reducer for dashboard + } else if (source === DashboardEventSource.DashboardHeaderSaveDashboard) { + // save edit mode changes + actions.setFiltersAndLayouts(values.temporaryFilters) + } } if (mode) { @@ -1152,7 +1225,7 @@ export const dashboardLogic = kea([ const initialLoad = action === 'initial_load' const allLoaded = false // TODO: Check this - actions.refreshAllDashboardItems({ action: 'refresh', initialLoad, dashboardQueryId }) + actions.refreshAllDashboardItems({ action, initialLoad, dashboardQueryId }) const payload: TimeToSeeDataPayload = { type: 'dashboard_load', @@ -1226,12 +1299,11 @@ export const dashboardLogic = kea([ insights_fetched_cached: 0, }) }, - applyTemporary: () => { - actions.setFilters(values.temporaryFilters) + setProperties: () => { + actions.loadDashboard({ action: 'preview' }) }, - cancelTemporary: () => { - actions.setDates(values.dashboard?.filters.date_from ?? null, values.dashboard?.filters.date_to ?? null) - actions.setProperties(values.dashboard?.filters.properties ?? null) + setDates: () => { + actions.loadDashboard({ action: 'preview' }) }, })), diff --git a/frontend/src/scenes/data-warehouse/new/dataWarehouseTableLogic.tsx b/frontend/src/scenes/data-warehouse/new/dataWarehouseTableLogic.tsx index 7f989a84f2428..92165933b107f 100644 --- a/frontend/src/scenes/data-warehouse/new/dataWarehouseTableLogic.tsx +++ b/frontend/src/scenes/data-warehouse/new/dataWarehouseTableLogic.tsx @@ -92,6 +92,13 @@ export const dataWarehouseTableLogic = kea([ table: { defaults: { ...NEW_WAREHOUSE_TABLE } as DataWarehouseTable, errors: ({ name, url_pattern, credential, format }) => { + if (url_pattern?.startsWith('s3://')) { + return { + url_pattern: + 'Please use the https version of your bucket url e.g. https://your-org.s3.amazonaws.com/airbyte/stripe/invoices/*.pqt', + } + } + return { name: !name && 'Please enter a name.', url_pattern: !url_pattern && 'Please enter a url pattern.', diff --git a/frontend/src/scenes/debug/Modifiers.tsx b/frontend/src/scenes/debug/Modifiers.tsx index b3291de229596..ee4f0175a724c 100644 --- a/frontend/src/scenes/debug/Modifiers.tsx +++ b/frontend/src/scenes/debug/Modifiers.tsx @@ -121,6 +121,7 @@ export function Modifiers; modifiers? 
options={[ { value: 'enabled', label: 'Enabled' }, { value: 'disabled', label: 'Disabled' }, + { value: 'optimized', label: 'Enabled, with Optimizations' }, ]} onChange={(value) => setQuery({ diff --git a/frontend/src/scenes/early-access-features/EarlyAccessFeature.tsx b/frontend/src/scenes/early-access-features/EarlyAccessFeature.tsx index ea4efe8144c9e..50e9809a930e6 100644 --- a/frontend/src/scenes/early-access-features/EarlyAccessFeature.tsx +++ b/frontend/src/scenes/early-access-features/EarlyAccessFeature.tsx @@ -445,7 +445,7 @@ function PersonsTableByFilter({ recordingsFilters, properties }: PersonsTableByF const [query, setQuery] = useState({ kind: NodeKind.DataTableNode, source: { - kind: NodeKind.PersonsNode, + kind: NodeKind.ActorsQuery, fixedProperties: properties, }, full: true, diff --git a/frontend/src/scenes/experiments/ExperimentResult.tsx b/frontend/src/scenes/experiments/ExperimentResult.tsx index 69d0cc9aab959..4e29cfff81362 100644 --- a/frontend/src/scenes/experiments/ExperimentResult.tsx +++ b/frontend/src/scenes/experiments/ExperimentResult.tsx @@ -1,7 +1,7 @@ import './Experiment.scss' -import { IconArchive, IconInfo } from '@posthog/icons' -import { LemonTable, Tooltip } from '@posthog/lemon-ui' +import { IconArchive } from '@posthog/icons' +import { LemonTable } from '@posthog/lemon-ui' import { useValues } from 'kea' import { EntityFilterInfo } from 'lib/components/EntityFilterInfo' import { FunnelLayout } from 'lib/constants' @@ -31,7 +31,6 @@ export function ExperimentResult({ secondaryMetricId }: ExperimentResultProps): secondaryMetricResultsLoading, conversionRateForVariant, getIndexForVariant, - areTrendResultsConfusing, sortedExperimentResultVariants, experimentMathAggregationForTrends, } = useValues(experimentLogic) @@ -166,15 +165,7 @@ export function ExperimentResult({ secondaryMetricId }: ExperimentResultProps): {' '} - {countDataForVariant(targetResults, variant)}{' '} - {areTrendResultsConfusing && idx === 0 && ( - - - - )} + {countDataForVariant(targetResults, variant)}
Exposure:{' '} diff --git a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx index 2bc468cd7ee30..c97a94127bcd8 100644 --- a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx @@ -5,6 +5,7 @@ import { LemonTable, LemonTableColumns, Tooltip } from '@posthog/lemon-ui' import { useValues } from 'kea' import { EntityFilterInfo } from 'lib/components/EntityFilterInfo' import { LemonProgress } from 'lib/lemon-ui/LemonProgress' +import { humanFriendlyNumber } from 'lib/utils' import { _FunnelExperimentResults, @@ -27,7 +28,6 @@ export function SummaryTable(): JSX.Element { conversionRateForVariant, experimentMathAggregationForTrends, countDataForVariant, - areTrendResultsConfusing, getHighestProbabilityVariant, } = useValues(experimentLogic) @@ -64,27 +64,25 @@ export function SummaryTable(): JSX.Element {
), - render: function Key(_, item, index): JSX.Element { - return ( -
- {countDataForVariant(experimentResults, item.key)}{' '} - {areTrendResultsConfusing && index === 0 && ( - - - - )} -
- ) + render: function Key(_, variant): JSX.Element { + const count = countDataForVariant(experimentResults, variant.key) + if (!count) { + return <>—</> + } + + return
{humanFriendlyNumber(count)}
}, }) columns.push({ key: 'exposure', title: 'Exposure', render: function Key(_, variant): JSX.Element { - return
{exposureCountDataForVariant(experimentResults, variant.key)}
+ const exposure = exposureCountDataForVariant(experimentResults, variant.key) + if (!exposure) { + return <>—</> + } + + return
{humanFriendlyNumber(exposure)}
}, }) columns.push({ diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx index b618e432692d1..1db1f9b9d93ad 100644 --- a/frontend/src/scenes/experiments/ExperimentView/components.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -1,10 +1,9 @@ import '../Experiment.scss' -import { IconArchive, IconCheck, IconInfo, IconMagicWand, IconX } from '@posthog/icons' +import { IconArchive, IconCheck, IconFlask, IconX } from '@posthog/icons' import { LemonBanner, LemonButton, - LemonCheckbox, LemonDialog, LemonDivider, LemonModal, @@ -159,7 +158,7 @@ export function ExploreButton({ icon = }: { icon?: JSX.Element return ( - } - onClick={() => openMakeDecisionModal()} - > - Make decision - - + + } + onClick={() => openShipVariantModal()} + > + Ship a variant + + + )} @@ -492,15 +492,12 @@ export function PageHeaderCustom(): JSX.Element { ) } -export function MakeDecisionModal({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { - const { experiment, sortedWinProbabilities, isMakeDecisionModalOpen, isExperimentStopped } = useValues( - experimentLogic({ experimentId }) - ) - const { closeMakeDecisionModal, shipVariant } = useActions(experimentLogic({ experimentId })) +export function ShipVariantModal({ experimentId }: { experimentId: Experiment['id'] }): JSX.Element { + const { experiment, sortedWinProbabilities, isShipVariantModalOpen } = useValues(experimentLogic({ experimentId })) + const { closeShipVariantModal, shipVariant } = useActions(experimentLogic({ experimentId })) const { aggregationLabel } = useValues(groupsModel) const [selectedVariantKey, setSelectedVariantKey] = useState() - const [shouldStopExperiment, setShouldStopExperiment] = useState(true) useEffect(() => setSelectedVariantKey(sortedWinProbabilities[0]?.key), [sortedWinProbabilities]) const aggregationTargetName = @@ -510,17 +507,19 @@ export function MakeDecisionModal({ experimentId }: { experimentId: Experiment[' return ( - + Cancel shipVariant({ selectedVariantKey, shouldStopExperiment })} + // TODO: revisit if it always makes sense to stop the experiment when shipping a variant + // does it make sense to still *monitor* the experiment after shipping the variant? + onClick={() => shipVariant({ selectedVariantKey, shouldStopExperiment: true })} type="primary" > Ship variant @@ -530,7 +529,8 @@ export function MakeDecisionModal({ experimentId }: { experimentId: Experiment[' >
- This action will roll out the selected variant to 100% of {aggregationTargetName}. + This will roll out the selected variant to 100% of {aggregationTargetName} and stop the + experiment.
@@ -554,29 +554,6 @@ export function MakeDecisionModal({ experimentId }: { experimentId: Experiment[' }))} />
- {!isExperimentStopped && ( - <> - -
- -
Stop experiment
- - - -
- } - onChange={() => setShouldStopExperiment(!shouldStopExperiment)} - checked={shouldStopExperiment} - /> -
- - )}
For more precise control over your release, adjust the rollout percentage and release conditions in diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 9f35001c8b175..bdcd2b51d4f62 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -165,8 +165,8 @@ export const experimentLogic = kea([ closeExperimentExposureModal: true, openExperimentCollectionGoalModal: true, closeExperimentCollectionGoalModal: true, - openMakeDecisionModal: true, - closeMakeDecisionModal: true, + openShipVariantModal: true, + closeShipVariantModal: true, setCurrentFormStep: (stepIndex: number) => ({ stepIndex }), moveToNextFormStep: true, }), @@ -305,11 +305,11 @@ export const experimentLogic = kea([ closeExperimentCollectionGoalModal: () => false, }, ], - isMakeDecisionModalOpen: [ + isShipVariantModalOpen: [ false, { - openMakeDecisionModal: () => true, - closeMakeDecisionModal: () => false, + openShipVariantModal: () => true, + closeShipVariantModal: () => false, }, ], experimentValuesChangedLocally: [ @@ -706,8 +706,8 @@ export const experimentLogic = kea([ }, shipVariantSuccess: ({ payload }) => { lemonToast.success('The selected variant has been shipped') - actions.closeMakeDecisionModal() - if (payload.shouldStopExperiment) { + actions.closeShipVariantModal() + if (payload.shouldStopExperiment && !values.isExperimentStopped) { actions.endExperiment() } actions.loadExperiment() @@ -715,7 +715,7 @@ export const experimentLogic = kea([ }, shipVariantFailure: ({ error }) => { lemonToast.error(error) - actions.closeMakeDecisionModal() + actions.closeShipVariantModal() }, })), loaders(({ actions, props, values }) => ({ @@ -1151,19 +1151,18 @@ export const experimentLogic = kea([ countDataForVariant: [ (s) => [s.experimentMathAggregationForTrends], (experimentMathAggregationForTrends) => - (experimentResults: Partial | null, variant: string): string => { + (experimentResults: Partial | null, variant: string): number | null => { const usingMathAggregationType = experimentMathAggregationForTrends( experimentResults?.filters || {} ) - const errorResult = '--' if (!experimentResults || !experimentResults.insight) { - return errorResult + return null } const variantResults = (experimentResults.insight as TrendResult[]).find( (variantTrend: TrendResult) => variantTrend.breakdown_value === variant ) if (!variantResults) { - return errorResult + return null } let result = variantResults.count @@ -1190,35 +1189,26 @@ export const experimentLogic = kea([ } } - if (result % 1 !== 0) { - // not an integer, so limit to 2 digits post decimal - return result.toFixed(2) - } - return result.toString() + return result }, ], exposureCountDataForVariant: [ () => [], () => - (experimentResults: Partial | null, variant: string): string => { - const errorResult = '--' + (experimentResults: Partial | null, variant: string): number | null => { if (!experimentResults || !experimentResults.variants) { - return errorResult + return null } const variantResults = (experimentResults.variants as TrendExperimentVariant[]).find( (variantTrend: TrendExperimentVariant) => variantTrend.key === variant ) if (!variantResults || !variantResults.absolute_exposure) { - return errorResult + return null } const result = variantResults.absolute_exposure - if (result % 1 !== 0) { - // not an integer, so limit to 2 digits post decimal - return result.toFixed(2) - } - return result.toString() + return result }, ], 
getHighestProbabilityVariant: [ @@ -1232,29 +1222,6 @@ export const experimentLogic = kea([ } }, ], - areTrendResultsConfusing: [ - (s) => [s.experimentResults, s.getHighestProbabilityVariant], - (experimentResults, getHighestProbabilityVariant): boolean => { - // Results are confusing when the top variant has a lower - // absolute count than other variants. This happens because - // exposure is invisible to the user - if (!experimentResults) { - return false - } - - // find variant with highest count - const variantResults: TrendResult = (experimentResults?.insight as TrendResult[]).reduce( - (bestVariant, currentVariant) => - currentVariant.count > bestVariant.count ? currentVariant : bestVariant, - { count: 0, breakdown_value: '' } as TrendResult - ) - if (!variantResults.count) { - return false - } - - return variantResults.breakdown_value !== getHighestProbabilityVariant(experimentResults) - }, - ], sortedExperimentResultVariants: [ (s) => [s.experimentResults, s.experiment], (experimentResults, experiment): string[] => { diff --git a/frontend/src/scenes/insights/InsightPageHeader.tsx b/frontend/src/scenes/insights/InsightPageHeader.tsx index 8d44c02581861..86e730efafe5b 100644 --- a/frontend/src/scenes/insights/InsightPageHeader.tsx +++ b/frontend/src/scenes/insights/InsightPageHeader.tsx @@ -46,15 +46,9 @@ export function InsightPageHeader({ insightLogicProps }: { insightLogicProps: In const { setInsightMode } = useActions(insightSceneLogic) // insightLogic - const { - insightProps, - canEditInsight, - insight, - queryBasedInsightSaving, - insightChanged, - insightSaving, - hasDashboardItemId, - } = useValues(insightLogic(insightLogicProps)) + const { insightProps, canEditInsight, insight, insightChanged, insightSaving, hasDashboardItemId } = useValues( + insightLogic(insightLogicProps) + ) const { setInsightMetadata, saveAs, saveInsight } = useActions(insightLogic(insightLogicProps)) // savedInsightsLogic @@ -84,7 +78,7 @@ export function InsightPageHeader({ insightLogicProps }: { insightLogicProps: In isOpen={insightMode === ItemMode.Subscriptions} closeModal={() => push(urls.insightView(insight.short_id as InsightShortId))} insightShortId={insight.short_id} - subscriptionId={itemId} + subscriptionId={typeof itemId === 'number' || itemId === 'new' ? itemId : null} /> push(urls.insightView(insight.short_id as InsightShortId))} isOpen={insightMode === ItemMode.Alerts} insightLogicProps={insightLogicProps} + insightId={insight.id as number} insightShortId={insight.short_id as InsightShortId} - alertId={itemId} + alertId={typeof itemId === 'string' ? itemId : null} /> @@ -156,7 +151,6 @@ export function InsightPageHeader({ insightLogicProps }: { insightLogicProps: In Share or embed
- {exportContext ? ( ) : null} + )} @@ -248,10 +243,6 @@ export function InsightPageHeader({ insightLogicProps }: { insightLogicProps: In loadInsights() push(urls.savedInsights()) }, - options: { - writeAsQuery: queryBasedInsightSaving, - readAsQuery: true, - }, }) } fullWidth diff --git a/frontend/src/scenes/insights/InsightTooltip/insightTooltipUtils.tsx b/frontend/src/scenes/insights/InsightTooltip/insightTooltipUtils.tsx index 58cff3667b28b..786a4d9872dc8 100644 --- a/frontend/src/scenes/insights/InsightTooltip/insightTooltipUtils.tsx +++ b/frontend/src/scenes/insights/InsightTooltip/insightTooltipUtils.tsx @@ -122,8 +122,11 @@ export function invertDataSource( if (pillValues.length > 0) { datumTitle = ( <> - {pillValues.map((pill) => ( - {midEllipsis(pill, 60)} + {pillValues.map((pill, index) => ( + <> + {midEllipsis(pill, 60)} + {index < pillValues.length - 1 && ' '} + ))} ) diff --git a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx index a1ecdd39c48c4..8480c57d1f4e5 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx +++ b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx @@ -346,7 +346,7 @@ export function ActionFilterRow({ } - // title="Delete graph series" + title="Delete graph series" data-attr={`delete-prop-filter-${index}`} noPadding={!enablePopup} onClick={() => { diff --git a/frontend/src/scenes/insights/insightDataLogic.tsx b/frontend/src/scenes/insights/insightDataLogic.tsx index b5f49ceef5c09..108e7dac0b1c4 100644 --- a/frontend/src/scenes/insights/insightDataLogic.tsx +++ b/frontend/src/scenes/insights/insightDataLogic.tsx @@ -1,6 +1,4 @@ import { actions, connect, kea, key, listeners, path, props, propsChanged, reducers, selectors } from 'kea' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { objectsEqual } from 'lib/utils' import { DATAWAREHOUSE_EDITOR_ITEM_ID } from 'scenes/data-warehouse/external/dataWarehouseExternalSceneLogic' import { keyForInsightLogicProps } from 'scenes/insights/sharedUtils' @@ -46,8 +44,6 @@ export const insightDataLogic = kea([ ], filterTestAccountsDefaultsLogic, ['filterTestAccountsDefault'], - featureFlagLogic, - ['featureFlags'], ], actions: [ insightLogic, @@ -87,11 +83,6 @@ export const insightDataLogic = kea([ }), selectors({ - useQueryDashboardCards: [ - (s) => [s.featureFlags], - (featureFlags) => !!featureFlags[FEATURE_FLAGS.QUERY_BASED_DASHBOARD_CARDS], - ], - query: [ (s) => [s.propsQuery, s.insight, s.internalQuery, s.filterTestAccountsDefault, s.isDataWarehouseQuery], (propsQuery, insight, internalQuery, filterTestAccountsDefault, isDataWarehouseQuery): Node | null => diff --git a/frontend/src/scenes/insights/insightLogic.tsx b/frontend/src/scenes/insights/insightLogic.tsx index 6edc322ed3171..8850f42ae2cee 100644 --- a/frontend/src/scenes/insights/insightLogic.tsx +++ b/frontend/src/scenes/insights/insightLogic.tsx @@ -2,10 +2,9 @@ import { LemonDialog, LemonInput } from '@posthog/lemon-ui' import { actions, connect, events, kea, key, listeners, LogicWrapper, path, props, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' import { router } from 'kea-router' -import { DashboardPrivilegeLevel, FEATURE_FLAGS } from 'lib/constants' +import { DashboardPrivilegeLevel } from 'lib/constants' import { LemonField } from 
'lib/lemon-ui/LemonField' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { objectsEqual } from 'lib/utils' import { eventUsageLogic, InsightEventSource } from 'lib/utils/eventUsageLogic' import { dashboardLogic } from 'scenes/dashboard/dashboardLogic' @@ -29,7 +28,7 @@ import { teamLogic } from '../teamLogic' import { insightDataLogic } from './insightDataLogic' import type { insightLogicType } from './insightLogicType' import { getInsightId } from './utils' -import { insightsApi, InsightsApiOptions } from './utils/api' +import { insightsApi } from './utils/api' export const UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES = 3 @@ -59,8 +58,6 @@ export const insightLogic: LogicWrapper = kea = kea { await breakpoint(100) - const insight = await insightsApi.getByShortId(shortId, { readAsQuery: true }, undefined, 'async') + const insight = await insightsApi.getByShortId(shortId, undefined, 'async') if (!insight) { throw new Error(`Insight with shortId ${shortId} not found`) @@ -113,10 +110,7 @@ export const insightLogic: LogicWrapper = kea = kea = kea { - const response = await insightsApi.update(values.insight.id as number, beforeUpdates, { - writeAsQuery: values.queryBasedInsightSaving, - readAsQuery: true, - }) + const response = await insightsApi.update(values.insight.id as number, beforeUpdates) savedInsightsLogic.findMounted()?.actions.loadInsights() dashboardsModel.actions.updateDashboardInsight(response) actions.setInsight(response, { overrideQuery: false, fromPersistentApi: true }) @@ -270,10 +258,6 @@ export const insightLogic: LogicWrapper = kea [(state) => insightDataLogic.findMounted(s.insightProps(state))?.values.query || null], (node): Node | null => node, ], - queryBasedInsightSaving: [ - (s) => [s.featureFlags], - (featureFlags) => !!featureFlags[FEATURE_FLAGS.QUERY_BASED_INSIGHTS_SAVING], - ], insightProps: [() => [(_, props) => props], (props): InsightLogicProps => props], isInDashboardContext: [() => [(_, props) => props], ({ dashboardId }) => !!dashboardId], hasDashboardItemId: [ @@ -336,13 +320,9 @@ export const insightLogic: LogicWrapper = kea = { - writeAsQuery: values.queryBasedInsightSaving, - readAsQuery: true, - } savedInsight = insightNumericId - ? await insightsApi.update(insightNumericId, insightRequest, options) - : await insightsApi.create(insightRequest, options) + ? 
await insightsApi.update(insightNumericId, insightRequest) + : await insightsApi.create(insightRequest) savedInsightsLogic.findMounted()?.actions.loadInsights() // Load insights afresh actions.saveInsightSuccess() } catch (e) { @@ -411,17 +391,11 @@ export const insightLogic: LogicWrapper = kea { - const insight = await insightsApi.create( - { - name, - query: values.query, - saved: true, - }, - { - writeAsQuery: values.queryBasedInsightSaving, - readAsQuery: true, - } - ) + const insight = await insightsApi.create({ + name, + query: values.query, + saved: true, + }) lemonToast.info( `You're now working on a copy of ${values.insight.name || values.insight.derived_name || name}` ) diff --git a/frontend/src/scenes/insights/insightSceneLogic.tsx b/frontend/src/scenes/insights/insightSceneLogic.tsx index 23b8ddb6e8c0c..cf1c390cd0138 100644 --- a/frontend/src/scenes/insights/insightSceneLogic.tsx +++ b/frontend/src/scenes/insights/insightSceneLogic.tsx @@ -71,10 +71,16 @@ export const insightSceneLogic = kea([ }, ], itemId: [ - null as null | number | 'new', + null as null | string | number, { setSceneState: (_, { itemId }) => - itemId !== undefined ? (itemId === 'new' ? 'new' : parseInt(itemId, 10)) : null, + itemId !== undefined + ? itemId === 'new' + ? 'new' + : Number.isInteger(+itemId) + ? parseInt(itemId, 10) + : itemId + : null, }, ], insightLogicRef: [ diff --git a/frontend/src/scenes/insights/utils/api.ts b/frontend/src/scenes/insights/utils/api.ts index 2665baa7e8a64..d35c3b85a851e 100644 --- a/frontend/src/scenes/insights/utils/api.ts +++ b/frontend/src/scenes/insights/utils/api.ts @@ -1,88 +1,47 @@ import api from 'lib/api' -import { getInsightFilterOrQueryForPersistance } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' import { getQueryBasedInsightModel } from '~/queries/nodes/InsightViz/utils' import { RefreshType } from '~/queries/schema' -import { InsightModel, InsightShortId, QueryBasedInsightModel } from '~/types' +import { InsightShortId, QueryBasedInsightModel } from '~/types' -export type InsightsApiOptions = { - writeAsQuery: boolean - readAsQuery: Flag -} - -export type ReadOnlyInsightsApiOptions = { - readAsQuery: Flag -} - -type ReturnedInsightModelByFlag = Flag extends true ? QueryBasedInsightModel : InsightModel - -export function getInsightModel( - insight: QueryBasedInsightModel, - asQuery: Flag -): ReturnedInsightModelByFlag { - return { - ...insight, - ...getInsightFilterOrQueryForPersistance(insight, asQuery), - } as ReturnedInsightModelByFlag -} - -async function _perform( +async function _perform( method: 'create' | 'update', insight: Partial, - options: InsightsApiOptions, id?: number -): Promise> { - const { writeAsQuery, readAsQuery } = options - - const data = getInsightModel(insight as QueryBasedInsightModel, writeAsQuery) - const legacyInsight = method === 'create' ? await api.insights[method](data) : await api.insights[method](id!, data) - - const response = readAsQuery ? getQueryBasedInsightModel(legacyInsight) : legacyInsight - return response as ReturnedInsightModelByFlag +): Promise { + const legacyInsight = + method === 'create' ? 
await api.insights[method](insight) : await api.insights[method](id!, insight) + return getQueryBasedInsightModel(legacyInsight) } export const insightsApi = { _perform, - async getByShortId( + async getByShortId( shortId: InsightShortId, - options: ReadOnlyInsightsApiOptions, basic?: boolean, refresh?: RefreshType - ): Promise | null> { + ): Promise { const legacyInsights = await api.insights.loadInsight(shortId, basic, refresh) if (legacyInsights.results.length === 0) { return null } const legacyInsight = legacyInsights.results[0] - const response = options.readAsQuery ? getQueryBasedInsightModel(legacyInsight) : legacyInsight - return response as ReturnedInsightModelByFlag + return getQueryBasedInsightModel(legacyInsight) as QueryBasedInsightModel }, - async getByNumericId( - numericId: number, - options: ReadOnlyInsightsApiOptions - ): Promise | null> { + async getByNumericId(numericId: number): Promise { const legacyInsight = await api.insights.get(numericId) - const response = - options.readAsQuery && legacyInsight !== null ? getQueryBasedInsightModel(legacyInsight) : legacyInsight - return response as ReturnedInsightModelByFlag + if (legacyInsight === null) { + return null + } + return getQueryBasedInsightModel(legacyInsight) }, - async create( - insight: Partial, - options: InsightsApiOptions - ): Promise> { - return this._perform('create', insight, options) + async create(insight: Partial): Promise { + return this._perform('create', insight) }, - async update( - id: number, - insightUpdate: Partial, - options: InsightsApiOptions - ): Promise> { - return this._perform('update', insightUpdate, options, id) + async update(id: number, insightUpdate: Partial): Promise { + return this._perform('update', insightUpdate, id) }, - async duplicate( - insight: QueryBasedInsightModel, - options: InsightsApiOptions - ): Promise> { - return this.create({ ...insight, name: insight.name ? `${insight.name} (copy)` : insight.name }, options) + async duplicate(insight: QueryBasedInsightModel): Promise { + return this.create({ ...insight, name: insight.name ? 
`${insight.name} (copy)` : insight.name }) }, } diff --git a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx index 11c9bc9170a06..b9673746bc7e5 100644 --- a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx +++ b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx @@ -292,9 +292,9 @@ order by count() desc pos, buildNodeQueryContent({ kind: NodeKind.DataTableNode, - columns: defaultDataTableColumns(NodeKind.PersonsNode), + columns: defaultDataTableColumns(NodeKind.ActorsQuery), source: { - kind: NodeKind.PersonsNode, + kind: NodeKind.ActorsQuery, properties: [], }, }) diff --git a/frontend/src/scenes/notebooks/Notebook/migrations/migrate.ts b/frontend/src/scenes/notebooks/Notebook/migrations/migrate.ts index b90653f251135..dcf01df8b0d2a 100644 --- a/frontend/src/scenes/notebooks/Notebook/migrations/migrate.ts +++ b/frontend/src/scenes/notebooks/Notebook/migrations/migrate.ts @@ -33,7 +33,7 @@ import { TrendsFilter, TrendsFilterLegacy, } from '~/queries/schema' -import { FunnelExclusionLegacy, NotebookNodeType, NotebookType, RecordingFilters } from '~/types' +import { FunnelExclusionLegacy, LegacyRecordingFilters, NotebookNodeType, NotebookType } from '~/types' // NOTE: Increment this number when you add a new content migration // It will bust the cache on the localContent in the notebookLogic @@ -62,8 +62,8 @@ function convertPlaylistFiltersToUniversalFilters(content: JSONContent[]): JSONC } // Legacy attrs on Notebook playlist nodes - const simpleFilters = node.attrs?.simpleFilters as RecordingFilters - const filters = node.attrs?.filters as RecordingFilters + const simpleFilters = node.attrs?.simpleFilters as LegacyRecordingFilters + const filters = node.attrs?.filters as LegacyRecordingFilters const { universalFilters } = node.attrs as NotebookNodePlaylistAttributes diff --git a/frontend/src/scenes/pipeline/BatchExportBackfillModal.tsx b/frontend/src/scenes/pipeline/BatchExportBackfillModal.tsx index 41c0b9e0b48c7..0e9657c985f81 100644 --- a/frontend/src/scenes/pipeline/BatchExportBackfillModal.tsx +++ b/frontend/src/scenes/pipeline/BatchExportBackfillModal.tsx @@ -61,7 +61,7 @@ export function BatchExportBackfillModal({ id }: BatchExportRunsLogicProps): JSX // So, if a user of a daily export selects "2024-08-14" they mean "2024-08-14 00:00:00 in their // project's timezone". } - + {({ value, onChange }) => ( )} - + {({ value, onChange }) => ( @@ -137,19 +139,32 @@ export function PipelinePluginConfiguration({
- {stage === PipelineStage.Destination && ( - - - Warning! This destination is a legacy "plugin" destination. These will soon be deprecated - in favour of our flexible V2 Destinations that allow for more control and flexibility. - - + {hasHogFunctions && plugin?.hog_function_migration_available && ( + + LemonDialog.open({ + title: 'Upgrade destination', + width: '30rem', + description: + 'This will create a new Destination in the upgraded system. The old destination will be disabled and can later be deleted. In addition there may be slight differences in the configuration options that you can choose to modify.', + secondaryButton: { + type: 'secondary', + children: 'Cancel', + }, + primaryButton: { + type: 'primary', + onClick: () => migrateToHogFunction(), + children: 'Upgrade', + }, + }), + disabled: loading, + }} + > + New version available! This destination is part of our legacy system. Click to upgrade. + )}
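The new banner only opens a confirmation dialog; the migration itself is the `migrateToHogFunction` loader added to `pipelinePluginConfigurationLogic` later in this diff, which calls the new plugin-config migrate endpoint and then routes the user to the freshly created hog function. A minimal sketch of that flow, with the kea loader boilerplate trimmed and the import paths assumed from the rest of the frontend:

```ts
import { router } from 'kea-router'
import api from 'lib/api'
import { urls } from 'scenes/urls'

import { PipelineStage } from '~/types'

// Sketch of the upgrade flow wired up by the dialog above: create the replacement
// hog function from the legacy plugin config, then replace the current URL with
// the new destination node. `pluginConfigId` comes from the logic's props.
async function migrateToHogFunction(pluginConfigId: number): Promise<void> {
    const hogFunction = await api.pluginConfigs.migrate(pluginConfigId)
    router.actions.replace(urls.pipelineNode(PipelineStage.Destination, `hog-${hogFunction.id}`))
}
```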
([ } } + let upperBound = dayjs().tz(teamLogic.values.timezone) + let period = '1 hour' + + if (values.batchExportConfig && end_at) { + if (values.batchExportConfig.interval == 'hour') { + upperBound = upperBound.add(1, 'hour') + } else if (values.batchExportConfig.interval == 'day') { + upperBound = upperBound.hour(0).minute(0).second(0) + upperBound = upperBound.add(1, 'day') + period = '1 day' + } else if (values.batchExportConfig.interval.endsWith('minutes')) { + // TODO: Make this generic for all minute frequencies. + // Currently, only 5 minute batch exports are supported. + upperBound = upperBound.add(5, 'minute') + period = '5 minutes' + } else { + upperBound = upperBound.add(1, 'hour') + } + + if (end_at > upperBound) { + lemonToast.error( + `Requested backfill end date lies too far into the future. Use an end date that is no more than ${period} from now (in your project's timezone)` + ) + return + } + } + await new Promise((resolve) => setTimeout(resolve, 1000)) await api.batchExports .createBackfill(props.id, { diff --git a/frontend/src/scenes/pipeline/destinations/newDestinationsLogic.tsx b/frontend/src/scenes/pipeline/destinations/newDestinationsLogic.tsx index 9c05fe20b38b6..716acee77809e 100644 --- a/frontend/src/scenes/pipeline/destinations/newDestinationsLogic.tsx +++ b/frontend/src/scenes/pipeline/destinations/newDestinationsLogic.tsx @@ -137,15 +137,16 @@ export const newDestinationsLogic = kea([ ).url, status: hogFunction.status, })), - ...Object.values(plugins).map((plugin) => ({ - icon: , - name: plugin.name, - description: plugin.description || '', - backend: PipelineBackend.Plugin, - url: urls.pipelineNodeNew(PipelineStage.Destination, `${plugin.id}`), - status: hogFunctionsEnabled ? ('deprecated' as const) : undefined, - })), - + ...Object.values(plugins) + .filter((x) => !hogFunctionsEnabled || !x.hog_function_migration_available) + .map((plugin) => ({ + icon: , + name: plugin.name, + description: plugin.description || '', + backend: PipelineBackend.Plugin, + url: urls.pipelineNodeNew(PipelineStage.Destination, `${plugin.id}`), + status: hogFunctionsEnabled ? ('deprecated' as const) : undefined, + })), ...batchExportServiceNames.map((service) => ({ icon: , name: humanizeBatchExportName(service), diff --git a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx index 8fcb30b45dd4c..6057a4c81c91e 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx @@ -1,4 +1,4 @@ -import { IconInfo, IconPlus } from '@posthog/icons' +import { IconPlus } from '@posthog/icons' import { LemonBanner, LemonButton, @@ -7,6 +7,7 @@ import { LemonInput, LemonLabel, LemonSwitch, + LemonTag, LemonTextArea, Link, SpinnerOverlay, @@ -51,6 +52,7 @@ export function HogFunctionConfiguration({ templateId, id }: { templateId?: stri sparkline, sparklineLoading, template, + templateHasChanged, } = useValues(logic) const { submitConfiguration, @@ -211,9 +213,6 @@ export function HogFunctionConfiguration({ templateId, id }: { templateId?: stri
Close
- resetToTemplate()}> - Reset to template - New function from template + + {templateHasChanged ? ( + resetToTemplate()}> + Update + + ) : null}
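The always-visible "Reset to template" button becomes an "Update" button that only renders when the saved function has drifted from its template. The flag driving it, `templateHasChanged`, is a selector added to `hogFunctionConfigurationLogic` further down in this diff; conceptually it is a comparison between the template's Hog source and the currently configured source. A simplified sketch (the surrounding kea wiring and real type names are omitted here):

```ts
// Mirrors the templateHasChanged selector added in hogFunctionConfigurationLogic below.
// The field names come from the diff; the types are simplified placeholders.
interface TemplateLike {
    template?: { hog?: string } | null
}

function templateHasChanged(hogFunction: TemplateLike | null, configuration: { hog?: string }): boolean {
    // True only when the function was built from a template and that template's
    // Hog source no longer matches what is currently configured.
    return !!hogFunction?.template?.hog && hogFunction.template.hog !== configuration.hog
}
```

The same flag also drives the "Update available!" tag shown next to the template name in the following hunk, so both affordances stay in sync with a single selector.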
} @@ -229,7 +234,10 @@ export function HogFunctionConfiguration({ templateId, id }: { templateId?: stri Built from template: {hogFunction?.template.name} - + + {templateHasChanged ? ( + Update available! + ) : null} @@ -318,6 +326,14 @@ export function HogFunctionConfiguration({ templateId, id }: { templateId?: stri Hide source code
+ + This is the underlying Hog code that will run whenever the + filters match.{' '} + + See the docs + {' '} + for more info + ) : undefined} - {showSource ? ( - <> - - inputs.{schema.key} - -
- } - onClick={() => setEditing(true)} - /> - - ) : null} + {showSource && ( + + inputs.{schema.key} + + )} +
+ + {supportsTemplating && ( + } + noPadding + className=" opacity-0 group-hover:opacity-100 p-1 transition-opacity" + > + Supports templating + + )} + {showSource && ( + } + onClick={() => setEditing(true)} + /> + )}
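The new "Supports templating" indicator marks input fields whose values can embed templated expressions rather than only static strings. The exact templating syntax is not shown in this diff, so the snippet below is only an illustrative assumption: hypothetical field names for a webhook-style destination, using the curly-brace expression form commonly used for hog function inputs.

```ts
// Hypothetical, illustrative input values; field names and templating syntax
// are assumptions, not taken from this diff.
const exampleInputs = {
    url: { value: 'https://example.com/hooks/posthog' },
    body: {
        value: {
            distinct_id: '{event.distinct_id}',
            current_url: '{event.properties.$current_url}',
            email: '{person.properties.email}',
        },
    },
}
```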
{value?.secret ? (
diff --git a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx index 363dfc3934878..c67a021e03987 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx @@ -487,6 +487,13 @@ export const hogFunctionConfigurationLogic = kea [s.hogFunction, s.configuration], + (hogFunction, configuration) => { + return hogFunction?.template?.hog && hogFunction.template.hog !== configuration.hog + }, + ], })), listeners(({ actions, values, cache }) => ({ @@ -583,8 +590,9 @@ export const hogFunctionConfigurationLogic = kea { diff --git a/frontend/src/scenes/pipeline/pipelinePluginConfigurationLogic.tsx b/frontend/src/scenes/pipeline/pipelinePluginConfigurationLogic.tsx index 2fbb2a872c90c..265de21a1dbd4 100644 --- a/frontend/src/scenes/pipeline/pipelinePluginConfigurationLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelinePluginConfigurationLogic.tsx @@ -142,6 +142,17 @@ export const pipelinePluginConfigurationLogic = kea { + if (!props.pluginConfigId) { + return null + } + const hogFunction = await api.pluginConfigs.migrate(props.pluginConfigId) + + router.actions.replace(urls.pipelineNode(PipelineStage.Destination, `hog-${hogFunction.id}`)) + + return values.pluginConfig + }, }, ], })), diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx index 54a9a39a7dbdc..ed0b4fc5110a7 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.tsx +++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx @@ -17,6 +17,7 @@ import { IconStickiness, IconTrends, IconUserPaths, + IconVideoCamera, IconWarning, } from '@posthog/icons' import { LemonSelectOptions } from '@posthog/lemon-ui' @@ -315,6 +316,12 @@ export const QUERY_TYPES_METADATA: Record = { icon: IconWarning, inMenu: false, }, + [NodeKind.RecordingsQuery]: { + name: 'Session Recordings', + description: 'View available recordings', + icon: IconVideoCamera, + inMenu: false, + }, } export const INSIGHT_TYPE_OPTIONS: LemonSelectOptions = [ @@ -374,7 +381,7 @@ export function NewInsightButton({ dataAttr }: NewInsightButtonProps): JSX.Eleme function SavedInsightsGrid(): JSX.Element { const { loadInsights, renameInsight, duplicateInsight } = useActions(savedInsightsLogic) - const { insights, insightsLoading, pagination, queryBasedInsightSaving } = useValues(savedInsightsLogic) + const { insights, insightsLoading, pagination } = useValues(savedInsightsLogic) const { currentTeamId } = useValues(teamLogic) const paginationState = usePagination(insights?.results || [], pagination) @@ -394,10 +401,6 @@ function SavedInsightsGrid(): JSX.Element { object: insight, endpoint: `projects/${currentTeamId}/insights`, callback: loadInsights, - options: { - writeAsQuery: queryBasedInsightSaving, - readAsQuery: true, - }, }) } placement="SavedInsightGrid" @@ -419,8 +422,7 @@ function SavedInsightsGrid(): JSX.Element { export function SavedInsights(): JSX.Element { const { loadInsights, updateFavoritedInsight, renameInsight, duplicateInsight, setSavedInsightsFilters } = useActions(savedInsightsLogic) - const { insights, count, insightsLoading, filters, sorting, pagination, queryBasedInsightSaving } = - useValues(savedInsightsLogic) + const { insights, count, insightsLoading, filters, sorting, pagination } = useValues(savedInsightsLogic) const { hasTagging } = useValues(organizationLogic) const { 
currentTeamId } = useValues(teamLogic) const summarizeInsight = useSummarizeInsight() @@ -542,10 +544,6 @@ export function SavedInsights(): JSX.Element { object: insight, endpoint: `projects/${currentTeamId}/insights`, callback: loadInsights, - options: { - writeAsQuery: queryBasedInsightSaving, - readAsQuery: true, - }, }) } data-attr={`insight-item-${insight.short_id}-dropdown-remove`} diff --git a/frontend/src/scenes/saved-insights/savedInsightsLogic.ts b/frontend/src/scenes/saved-insights/savedInsightsLogic.ts index f8bc9c8ec4d5b..edf6b8d59e17c 100644 --- a/frontend/src/scenes/saved-insights/savedInsightsLogic.ts +++ b/frontend/src/scenes/saved-insights/savedInsightsLogic.ts @@ -2,12 +2,10 @@ import { actions, connect, kea, listeners, path, reducers, selectors } from 'kea import { loaders } from 'kea-loaders' import { actionToUrl, router, urlToAction } from 'kea-router' import api, { CountedPaginatedResponse } from 'lib/api' -import { FEATURE_FLAGS } from 'lib/constants' import { dayjs } from 'lib/dayjs' import { Sorting } from 'lib/lemon-ui/LemonTable' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' import { PaginationManual } from 'lib/lemon-ui/PaginationControl' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { objectDiffShallow, objectsEqual, toParams } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { deleteDashboardLogic } from 'scenes/dashboard/deleteDashboardLogic' @@ -69,7 +67,7 @@ function cleanFilters(values: Partial): SavedInsightFilters export const savedInsightsLogic = kea([ path(['scenes', 'saved-insights', 'savedInsightsLogic']), connect(() => ({ - values: [teamLogic, ['currentTeamId'], featureFlagLogic, ['featureFlags'], sceneLogic, ['activeScene']], + values: [teamLogic, ['currentTeamId'], sceneLogic, ['activeScene']], logic: [eventUsageLogic], })), actions({ @@ -112,7 +110,7 @@ export const savedInsightsLogic = kea([ if (filters.search && String(filters.search).match(/^[0-9]+$/)) { try { - const insight = await insightsApi.getByNumericId(Number(filters.search), { readAsQuery: true }) + const insight = await insightsApi.getByNumericId(Number(filters.search)) return { ...response, count: response.count + 1, @@ -141,13 +139,9 @@ export const savedInsightsLogic = kea([ } as CountedPaginatedResponse & { offset: number } }, updateFavoritedInsight: async ({ insight, favorited }) => { - const response = await insightsApi.update( - insight.id, - { - favorited, - }, - { writeAsQuery: values.queryBasedInsightSaving, readAsQuery: true } - ) + const response = await insightsApi.update(insight.id, { + favorited, + }) const updatedInsights = values.insights.results.map((i) => i.short_id === insight.short_id ? 
response : i ) @@ -181,10 +175,6 @@ export const savedInsightsLogic = kea([ ], }), selectors({ - queryBasedInsightSaving: [ - (s) => [s.featureFlags], - (featureFlags) => !!featureFlags[FEATURE_FLAGS.QUERY_BASED_INSIGHTS_SAVING], - ], filters: [(s) => [s.rawFilters], (rawFilters): SavedInsightFilters => cleanFilters(rawFilters || {})], count: [(s) => [s.insights], (insights) => insights.count], usingFilters: [ @@ -287,10 +277,7 @@ export const savedInsightsLogic = kea([ insightsModel.actions.renameInsight(insight) }, duplicateInsight: async ({ insight, redirectToInsight }) => { - const newInsight = await insightsApi.duplicate(insight, { - writeAsQuery: values.queryBasedInsightSaving, - readAsQuery: true, - }) + const newInsight = await insightsApi.duplicate(insight) actions.addInsight(newInsight) redirectToInsight && router.actions.push(urls.insightEdit(newInsight.short_id)) }, @@ -355,7 +342,7 @@ export const savedInsightsLogic = kea([ // `fromItem` for legacy /insights url redirect support const insightNumericId = parseInt(hashParams.fromItem) try { - const insight = await insightsApi.getByNumericId(insightNumericId, { readAsQuery: true }) + const insight = await insightsApi.getByNumericId(insightNumericId) if (!insight?.short_id) { throw new Error('Could not find insight or missing short_id') } diff --git a/frontend/src/scenes/session-recordings/filters/ReplayTaxonomicFilters.tsx b/frontend/src/scenes/session-recordings/filters/ReplayTaxonomicFilters.tsx index a87d5bc1e25b2..0379373bed975 100644 --- a/frontend/src/scenes/session-recordings/filters/ReplayTaxonomicFilters.tsx +++ b/frontend/src/scenes/session-recordings/filters/ReplayTaxonomicFilters.tsx @@ -24,22 +24,26 @@ export function ReplayTaxonomicFilters({ onChange }: ReplayTaxonomicFiltersProps return !!filters.find((f) => f.type === PropertyFilterType.Recording && f.key === key) } - const recordingProperties = [ + const properties = [ { label: 'Visited page', key: 'visited_page', + propertyFilterType: PropertyFilterType.Recording, }, { label: 'Platform', key: 'snapshot_source', + propertyFilterType: PropertyFilterType.Recording, }, { label: 'Console log level', key: 'console_log_level', + propertyFilterType: PropertyFilterType.LogEntry, }, { label: 'Console log text', key: 'console_log_query', + propertyFilterType: PropertyFilterType.LogEntry, }, ] @@ -48,12 +52,12 @@ export function ReplayTaxonomicFilters({ onChange }: ReplayTaxonomicFiltersProps
Replay properties
    - {recordingProperties.map(({ key, label }) => ( + {properties.map(({ key, label, propertyFilterType }) => ( onChange(key, {})} + onClick={() => onChange(key, { propertyFilterType: propertyFilterType })} disabledReason={hasFilter(key) ? `${label} filter already added` : undefined} > {label} diff --git a/frontend/src/scenes/session-recordings/player/PlayerSettings.tsx b/frontend/src/scenes/session-recordings/player/PlayerSettings.tsx new file mode 100644 index 0000000000000..615cda9768b4f --- /dev/null +++ b/frontend/src/scenes/session-recordings/player/PlayerSettings.tsx @@ -0,0 +1,109 @@ +import { IconEllipsis, IconFastForward } from '@posthog/icons' +import { LemonButton, LemonMenu, LemonSelect, LemonSwitch } from '@posthog/lemon-ui' +import clsx from 'clsx' +import { useActions, useValues } from 'kea' + +import { playerSettingsLogic } from './playerSettingsLogic' +import { PLAYBACK_SPEEDS } from './sessionRecordingPlayerLogic' + +export const PlayerSettings = (): JSX.Element => { + const { speed, autoplayDirection, skipInactivitySetting, showMouseTail, showSeekbarTicks } = + useValues(playerSettingsLogic) + const { setSpeed, setAutoplayDirection, setSkipInactivitySetting, setShowMouseTail, setShowSeekbarTicks } = + useActions(playerSettingsLogic) + + return ( +
    + ( +
    + Autoplay + + + Autoplay next recording +
    ({!autoplayDirection ? 'off' : autoplayDirection}) +
    + } + value={autoplayDirection} + aria-label="Autoplay next recording" + onChange={setAutoplayDirection} + dropdownPlacement="bottom-end" + dropdownMatchSelectWidth={false} + options={[ + { value: null, label: 'Off' }, + { value: 'newer', label: 'Newer recordings' }, + { value: 'older', label: 'Older recordings' }, + ]} + size="small" + /> +
    + ), + }, + { + custom: true, + label: () => ( + + ), + }, + { + custom: true, + label: () => ( + + ), + }, + { + custom: true, + label: () => ( + + } + fullWidth + data-attr="skip-inactivity" + className="px-2 py-1" + checked={skipInactivitySetting} + onChange={setSkipInactivitySetting} + label="Skip inactivity" + /> + ), + }, + { + label: `Playback speed (${speed}x)`, + items: PLAYBACK_SPEEDS.map((speedToggle) => ({ + label: `${speedToggle}x`, + onClick: () => setSpeed(speedToggle), + active: speedToggle === speed, + })), + placement: 'right-end', + }, + ]} + > + } /> + +
+ ) +} diff --git a/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx b/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx index 061ffee0c56cf..96e27ef39df1d 100644 --- a/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx +++ b/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx @@ -1,20 +1,15 @@ -import { IconFastForward, IconPause, IconPlay } from '@posthog/icons' -import { LemonMenu, LemonSwitch } from '@posthog/lemon-ui' +import { IconPause, IconPlay } from '@posthog/icons' import clsx from 'clsx' import { useActions, useValues } from 'kea' import { IconFullScreen, IconSync } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' -import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { - PLAYBACK_SPEEDS, - sessionRecordingPlayerLogic, -} from 'scenes/session-recordings/player/sessionRecordingPlayerLogic' +import { sessionRecordingPlayerLogic } from 'scenes/session-recordings/player/sessionRecordingPlayerLogic' import { KeyboardShortcut } from '~/layout/navigation-3000/components/KeyboardShortcut' import { SessionPlayerState } from '~/types' import { PlayerMetaLinks } from '../PlayerMetaLinks' -import { playerSettingsLogic } from '../playerSettingsLogic' +import { PlayerSettings } from '../PlayerSettings' import { SeekSkip, Timestamp } from './PlayerControllerTime' import { Seekbar } from './Seekbar' @@ -22,9 +17,6 @@ export function PlayerController({ linkIconsOnly }: { linkIconsOnly: boolean }): const { playingState, isFullScreen, endReached } = useValues(sessionRecordingPlayerLogic) const { togglePlayPause, setIsFullScreen } = useActions(sessionRecordingPlayerLogic) - const { speed, skipInactivitySetting } = useValues(playerSettingsLogic) - const { setSpeed, setSkipInactivitySetting } = useActions(playerSettingsLogic) - const showPause = playingState === SessionPlayerState.PLAY return ( @@ -54,41 +46,17 @@ export function PlayerController({ linkIconsOnly }: { linkIconsOnly: boolean }):
- ({ - label: `${speedToggle}x`, - onClick: () => setSpeed(speedToggle), - }))} + setIsFullScreen(!isFullScreen)} + tooltip={`${!isFullScreen ? 'Go' : 'Exit'} full screen (F)`} > - - {speed}x - - - - } - /> -
-
- - setIsFullScreen(!isFullScreen)}> - - - + +
+ diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.stories.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.stories.tsx index 616a80e09bfc5..56d2b9346b0c0 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.stories.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.stories.tsx @@ -165,3 +165,41 @@ WebVitalsEvent.args = { } ), } + +export const GroupIdentifyEvent: Story = BasicTemplate.bind({}) +GroupIdentifyEvent.args = { + item: makeItem( + {}, + { event: '$groupidentify' }, + { + $os: 'Mac OS X', + $os_version: '10.15.7', + $browser: 'Chrome', + $device_type: 'Desktop', + $current_url: 'https://us.posthog.com/project/2/insights/new', + $host: 'us.posthog.com', + $pathname: '/project/2/insights/new', + $initial_person_info: { + r: '$direct', + u: 'https://us.posthog.com/project/2', + }, + $groups: { + project: 'fc445b88-e2c4-488e-bb52-aa80cd7918c9', + organization: '4dc8564d-bd82-1065-2f40-97f7c50f67cf', + customer: 'cus_IK2DWsWVn2ZM16', + instance: 'https://us.posthog.com', + }, + $group_type: 'instance', + $group_key: 'https://us.posthog.com', + $group_set: { + site_url: 'https://us.posthog.com', + }, + $session_id: '01917043-b2a1-7c2e-a57e-6db514bde084', + $window_id: '01917043-b2a1-7c2e-a57e-6db6676bb4a1', + $group_2: 'fc445b88-e2c4-488e-bb52-aa80cd7918c9', + $group_0: '4dc8564d-bd82-1065-2f40-97f7c50f67cf', + $group_3: 'cus_IK2DWsWVn2ZM16', + $group_1: 'https://us.posthog.com', + } + ), +} diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx index f5cfe244cb075..699ae426b1c6d 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx @@ -64,6 +64,8 @@ export function ItemEvent({ item, expanded, setExpanded }: ItemEventProps): JSX. 
let promotedKeys: string[] | undefined = undefined if (item.data.event === '$pageview') { promotedKeys = ['$current_url', '$title', '$referrer'] + } else if (item.data.event === '$groupidentify') { + promotedKeys = ['$group_type', '$group_key', '$group_set'] } else if (item.data.event === '$screen') { promotedKeys = ['$screen_name'] } else if (item.data.event === '$web_vitals') { diff --git a/frontend/src/scenes/session-recordings/player/inspector/inspectorListFiltering.test.ts b/frontend/src/scenes/session-recordings/player/inspector/inspectorListFiltering.test.ts new file mode 100644 index 0000000000000..ae854da7b1330 --- /dev/null +++ b/frontend/src/scenes/session-recordings/player/inspector/inspectorListFiltering.test.ts @@ -0,0 +1,182 @@ +import { filterInspectorListItems } from 'scenes/session-recordings/player/inspector/inspectorListFiltering' +import { + InspectorListBrowserVisibility, + InspectorListItemDoctor, + InspectorListItemEvent, + InspectorListOfflineStatusChange, +} from 'scenes/session-recordings/player/inspector/playerInspectorLogic' +import { SharedListMiniFilter } from 'scenes/session-recordings/player/playerSettingsLogic' + +import { PerformanceEvent, SessionRecordingPlayerTab } from '~/types' + +describe('filtering inspector list items', () => { + describe('the all tab', () => { + it('includes browser visibility', () => { + expect( + filterInspectorListItems({ + allItems: [ + { + type: 'browser-visibility', + } as InspectorListBrowserVisibility, + ], + tab: SessionRecordingPlayerTab.ALL, + miniFiltersByKey: { 'all-everything': { enabled: true } as unknown as SharedListMiniFilter }, + showOnlyMatching: false, + showMatchingEventsFilter: false, + windowIdFilter: null, + }) + ).toHaveLength(1) + }) + + it('hides doctor items in everything mode', () => { + const filteredItems = filterInspectorListItems({ + allItems: [ + { + type: 'browser-visibility', + } as InspectorListBrowserVisibility, + { + type: 'doctor', + } as InspectorListItemDoctor, + ], + tab: SessionRecordingPlayerTab.ALL, + miniFiltersByKey: { 'all-everything': { enabled: true } as unknown as SharedListMiniFilter }, + showOnlyMatching: false, + showMatchingEventsFilter: false, + windowIdFilter: null, + }) + expect(filteredItems.map((item) => item.type)).toEqual(['browser-visibility']) + }) + }) + + describe('the events tab', () => { + it('filters by window id', () => { + expect( + filterInspectorListItems({ + allItems: [ + { + type: SessionRecordingPlayerTab.EVENTS, + windowId: 'this window', + data: { event: '$exception' } as unknown as PerformanceEvent, + } as unknown as InspectorListItemEvent, + { + type: SessionRecordingPlayerTab.EVENTS, + windowId: 'a different window', + data: { event: '$exception' } as unknown as PerformanceEvent, + } as unknown as InspectorListItemEvent, + ], + tab: SessionRecordingPlayerTab.EVENTS, + miniFiltersByKey: { 'events-all': { enabled: true } as unknown as SharedListMiniFilter }, + showOnlyMatching: false, + showMatchingEventsFilter: false, + windowIdFilter: 'a different window', + }) + ).toHaveLength(1) + }) + + it('excludes browser visibility on console filter', () => { + expect( + filterInspectorListItems({ + allItems: [ + { + type: 'browser-visibility', + } as InspectorListBrowserVisibility, + ], + tab: SessionRecordingPlayerTab.EVENTS, + miniFiltersByKey: { 'all-everything': { enabled: false } as unknown as SharedListMiniFilter }, + showOnlyMatching: false, + showMatchingEventsFilter: false, + windowIdFilter: null, + }) + ).toHaveLength(0) + }) + + it('excludes 
browser visibility when show only matching', () => { + expect( + filterInspectorListItems({ + allItems: [ + { + type: 'browser-visibility', + } as InspectorListBrowserVisibility, + ], + tab: SessionRecordingPlayerTab.EVENTS, + miniFiltersByKey: { 'all-everything': { enabled: true } as unknown as SharedListMiniFilter }, + showOnlyMatching: true, + showMatchingEventsFilter: true, + windowIdFilter: null, + }) + ).toHaveLength(0) + }) + }) + + describe('the doctor tab', () => { + it('ignores events that are not exceptions', () => { + expect( + filterInspectorListItems({ + allItems: [ + { + type: SessionRecordingPlayerTab.EVENTS, + data: { event: 'an event' } as unknown as PerformanceEvent, + } as unknown as InspectorListItemEvent, + ], + tab: SessionRecordingPlayerTab.DOCTOR, + miniFiltersByKey: {}, + showOnlyMatching: false, + showMatchingEventsFilter: false, + windowIdFilter: null, + }) + ).toHaveLength(0) + }) + + it('includes events that are exceptions', () => { + expect( + filterInspectorListItems({ + allItems: [ + { + type: SessionRecordingPlayerTab.EVENTS, + data: { event: '$exception' } as unknown as PerformanceEvent, + } as unknown as InspectorListItemEvent, + ], + tab: SessionRecordingPlayerTab.DOCTOR, + miniFiltersByKey: {}, + showOnlyMatching: false, + showMatchingEventsFilter: false, + windowIdFilter: null, + }) + ).toHaveLength(1) + }) + + it('includes browser offline status', () => { + expect( + filterInspectorListItems({ + allItems: [ + { + type: 'offline-status', + } as unknown as InspectorListOfflineStatusChange, + ], + tab: SessionRecordingPlayerTab.DOCTOR, + miniFiltersByKey: {}, + showOnlyMatching: false, + showMatchingEventsFilter: false, + windowIdFilter: null, + }) + ).toHaveLength(1) + }) + + it('includes browser visibility status', () => { + expect( + filterInspectorListItems({ + allItems: [ + { + type: 'browser-visibility', + } as InspectorListBrowserVisibility, + ], + tab: SessionRecordingPlayerTab.DOCTOR, + miniFiltersByKey: {}, + showOnlyMatching: false, + showMatchingEventsFilter: false, + windowIdFilter: null, + }) + ).toHaveLength(1) + }) + }) +}) diff --git a/frontend/src/scenes/session-recordings/player/inspector/inspectorListFiltering.ts b/frontend/src/scenes/session-recordings/player/inspector/inspectorListFiltering.ts new file mode 100644 index 0000000000000..481170e46f659 --- /dev/null +++ b/frontend/src/scenes/session-recordings/player/inspector/inspectorListFiltering.ts @@ -0,0 +1,222 @@ +import { InspectorListItemPerformance } from 'scenes/session-recordings/apm/performanceEventDataLogic' +import { + IMAGE_WEB_EXTENSIONS, + InspectorListBrowserVisibility, + InspectorListItem, + InspectorListItemConsole, + InspectorListItemDoctor, + InspectorListItemEvent, + InspectorListOfflineStatusChange, +} from 'scenes/session-recordings/player/inspector/playerInspectorLogic' +import type { SharedListMiniFilter } from 'scenes/session-recordings/player/playerSettingsLogic' + +import { SessionRecordingPlayerTab } from '~/types' + +const PostHogMobileEvents = [ + 'Deep Link Opened', + 'Application Opened', + 'Application Backgrounded', + 'Application Updated', + 'Application Installed', + 'Application Became Active', +] + +function isPostHogMobileEvent(item: InspectorListItem): boolean { + return isEvent(item) && PostHogMobileEvents.includes(item.data.event) +} + +function isPostHogEvent(item: InspectorListItem): boolean { + return (isEvent(item) && item.data.event.startsWith('$')) || isPostHogMobileEvent(item) +} + +function isNetworkEvent(item: InspectorListItem): 
item is InspectorListItemPerformance { + return item.type === SessionRecordingPlayerTab.NETWORK +} + +function isOfflineStatusChange(item: InspectorListItem): item is InspectorListOfflineStatusChange { + return item.type === 'offline-status' +} + +function isBrowserVisibilityEvent(item: InspectorListItem): item is InspectorListBrowserVisibility { + return item.type === 'browser-visibility' +} + +function isNavigationEvent(item: InspectorListItem): boolean { + return isNetworkEvent(item) && ['navigation'].includes(item.data.entry_type || '') +} + +function isNetworkError(item: InspectorListItem): boolean { + return isNetworkEvent(item) && (item.data.response_status || -1) >= 400 +} + +function isSlowNetwork(item: InspectorListItem): boolean { + return isNetworkEvent(item) && (item.data.duration || -1) >= 1000 +} + +function isEvent(item: InspectorListItem): item is InspectorListItemEvent { + return item.type === SessionRecordingPlayerTab.EVENTS +} + +function isPageviewOrScreen(item: InspectorListItem): boolean { + return isEvent(item) && ['$pageview', '$screen'].includes(item.data.event) +} + +function isAutocapture(item: InspectorListItem): boolean { + return isEvent(item) && item.data.event === '$autocapture' +} + +function isConsoleEvent(item: InspectorListItem): item is InspectorListItemConsole { + return item.type === SessionRecordingPlayerTab.CONSOLE +} + +function isConsoleError(item: InspectorListItem): boolean { + return isConsoleEvent(item) && item.data.level === 'error' +} + +function isException(item: InspectorListItem): boolean { + return isEvent(item) && item.data.event === '$exception' +} + +function isErrorEvent(item: InspectorListItem): boolean { + return isEvent(item) && item.data.event.toLowerCase().includes('error') +} + +function isDoctorEvent(item: InspectorListItem): item is InspectorListItemDoctor { + return item.type === 'doctor' +} + +export function filterInspectorListItems({ + allItems, + tab, + miniFiltersByKey, + showMatchingEventsFilter, + showOnlyMatching, + windowIdFilter, +}: { + allItems: InspectorListItem[] + tab: SessionRecordingPlayerTab + miniFiltersByKey: { + [key: string]: SharedListMiniFilter + } + showMatchingEventsFilter: boolean + showOnlyMatching: boolean + windowIdFilter: string | null +}): InspectorListItem[] { + const items: InspectorListItem[] = [] + + const shortCircuitExclude = (item: InspectorListItem): boolean => + isNetworkEvent(item) && item.data.entry_type === 'paint' + + const inspectorTabFilters: Record boolean> = { + [SessionRecordingPlayerTab.ALL]: (item: InspectorListItem) => { + // even in everything mode we don't show doctor events + const isAllEverything = miniFiltersByKey['all-everything']?.enabled === true && !isDoctorEvent(item) + const isAllAutomatic = + !!miniFiltersByKey['all-automatic']?.enabled && + (isOfflineStatusChange(item) || + isBrowserVisibilityEvent(item) || + isNavigationEvent(item) || + isNetworkError(item) || + isSlowNetwork(item) || + isPostHogMobileEvent(item) || + isPageviewOrScreen(item) || + isAutocapture(item)) + const isAllErrors = + (!!miniFiltersByKey['all-errors']?.enabled && isNetworkError(item)) || + isConsoleError(item) || + isException(item) || + isErrorEvent(item) + return isAllEverything || isAllAutomatic || isAllErrors + }, + [SessionRecordingPlayerTab.EVENTS]: (item: InspectorListItem) => { + if (item.type !== SessionRecordingPlayerTab.EVENTS) { + return false + } + return ( + !!miniFiltersByKey['events-all']?.enabled || + (!!miniFiltersByKey['events-posthog']?.enabled && 
isPostHogEvent(item)) || + (!!miniFiltersByKey['events-custom']?.enabled && !isPostHogEvent(item)) || + (!!miniFiltersByKey['events-pageview']?.enabled && + ['$pageview', '$screen'].includes(item.data.event)) || + (!!miniFiltersByKey['events-autocapture']?.enabled && item.data.event === '$autocapture') || + (!!miniFiltersByKey['events-exceptions']?.enabled && item.data.event === '$exception') + ) + }, + [SessionRecordingPlayerTab.CONSOLE]: (item: InspectorListItem) => { + if (item.type !== SessionRecordingPlayerTab.CONSOLE) { + return false + } + return ( + !!miniFiltersByKey['console-all']?.enabled || + (!!miniFiltersByKey['console-info']?.enabled && ['log', 'info'].includes(item.data.level)) || + (!!miniFiltersByKey['console-warn']?.enabled && item.data.level === 'warn') || + (!!miniFiltersByKey['console-error']?.enabled && isConsoleError(item)) + ) + }, + [SessionRecordingPlayerTab.NETWORK]: (item: InspectorListItem) => { + if (item.type !== SessionRecordingPlayerTab.NETWORK) { + return false + } + return ( + !!miniFiltersByKey['performance-all']?.enabled === true || + (!!miniFiltersByKey['performance-document']?.enabled && isNavigationEvent(item)) || + (!!miniFiltersByKey['performance-fetch']?.enabled && + item.data.entry_type === 'resource' && + ['fetch', 'xmlhttprequest'].includes(item.data.initiator_type || '')) || + (!!miniFiltersByKey['performance-assets-js']?.enabled && + item.data.entry_type === 'resource' && + (item.data.initiator_type === 'script' || + (['link', 'other'].includes(item.data.initiator_type || '') && + item.data.name?.includes('.js')))) || + (!!miniFiltersByKey['performance-assets-css']?.enabled && + item.data.entry_type === 'resource' && + (item.data.initiator_type === 'css' || + (['link', 'other'].includes(item.data.initiator_type || '') && + item.data.name?.includes('.css')))) || + (!!miniFiltersByKey['performance-assets-img']?.enabled && + item.data.entry_type === 'resource' && + (item.data.initiator_type === 'img' || + (['link', 'other'].includes(item.data.initiator_type || '') && + !!IMAGE_WEB_EXTENSIONS.some((ext) => item.data.name?.includes(`.${ext}`))))) || + (!!miniFiltersByKey['performance-other']?.enabled && + item.data.entry_type === 'resource' && + ['other'].includes(item.data.initiator_type || '') && + ![...IMAGE_WEB_EXTENSIONS, 'css', 'js'].some((ext) => item.data.name?.includes(`.${ext}`))) + ) + }, + [SessionRecordingPlayerTab.DOCTOR]: (item: InspectorListItem) => { + return ( + isOfflineStatusChange(item) || + isBrowserVisibilityEvent(item) || + isException(item) || + isDoctorEvent(item) + ) + }, + } + + for (const item of allItems) { + let include = false + + if (shortCircuitExclude(item)) { + continue + } + + include = inspectorTabFilters[tab](item) + + if (showMatchingEventsFilter && showOnlyMatching) { + // Special case - overrides the others + include = include && item.highlightColor === 'primary' + } + + const itemWindowId = item.windowId // how do we use sometimes properties $window_id... 
maybe we just shouldn't need to :shrug: + const excludedByWindowFilter = !!windowIdFilter && !!itemWindowId && itemWindowId !== windowIdFilter + + if (!include || excludedByWindowFilter) { + continue + } + + items.push(item) + } + + return items +} diff --git a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts index 207491cbc0fe0..110c277aa8743 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts +++ b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts @@ -12,9 +12,10 @@ import { InspectorListItemPerformance, performanceEventDataLogic, } from 'scenes/session-recordings/apm/performanceEventDataLogic' +import { filterInspectorListItems } from 'scenes/session-recordings/player/inspector/inspectorListFiltering' import { playerSettingsLogic } from 'scenes/session-recordings/player/playerSettingsLogic' import { - convertUniversalFiltersToLegacyFilters, + convertUniversalFiltersToRecordingsQuery, MatchingEventsMatchType, } from 'scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic' @@ -100,23 +101,6 @@ export interface PlayerInspectorLogicProps extends SessionRecordingPlayerLogicPr matchingEventsMatchType?: MatchingEventsMatchType } -const PostHogMobileEvents = [ - 'Deep Link Opened', - 'Application Opened', - 'Application Backgrounded', - 'Application Updated', - 'Application Installed', - 'Application Became Active', -] - -function isMobileEvent(item: InspectorListItemEvent): boolean { - return PostHogMobileEvents.includes(item.data.event) -} - -function isPostHogEvent(item: InspectorListItemEvent): boolean { - return item.data.event.startsWith('$') || isMobileEvent(item) -} - function _isCustomSnapshot(x: unknown): x is customEvent { return (x as customEvent).type === 5 } @@ -168,7 +152,7 @@ export const playerInspectorLogic = kea([ ], values: [ playerSettingsLogic, - ['showOnlyMatching', 'tab', 'miniFiltersByKey', 'searchQuery'], + ['showOnlyMatching', 'showSeekbarTicks', 'tab', 'miniFiltersByKey', 'searchQuery'], sessionRecordingDataLogic(props), [ 'sessionPlayerData', @@ -245,7 +229,7 @@ export const playerInspectorLogic = kea([ throw new Error('Backend matching events type must include its filters') } const params = toParams({ - ...convertUniversalFiltersToLegacyFilters(filters), + ...convertUniversalFiltersToRecordingsQuery(filters), session_ids: [props.sessionRecordingId], }) const response = await api.recordings.getMatchingEvents(params) @@ -352,7 +336,7 @@ export const playerInspectorLogic = kea([ const customEvent = snapshot as customEvent const tag = customEvent.data.tag - if (tag === '$pageview') { + if (['$pageview', 'window hidden', 'browser offline', 'browser online'].includes(tag)) { return } @@ -579,241 +563,30 @@ export const playerInspectorLogic = kea([ showMatchingEventsFilter, windowIdFilter ): InspectorListItem[] => { - const items: InspectorListItem[] = [] - - for (const item of allItems) { - let include = false - - // always show offline status changes - if (item.type === 'offline-status' || item.type === 'browser-visibility') { - include = - tab === SessionRecordingPlayerTab.DOCTOR || - !!( - miniFiltersByKey['performance-all']?.enabled || - miniFiltersByKey['all-everything']?.enabled || - miniFiltersByKey['all-automatic']?.enabled || - miniFiltersByKey['console-all']?.enabled || - miniFiltersByKey['events-all']?.enabled - ) - } - - if (item.type === 
SessionRecordingPlayerTab.DOCTOR && tab === SessionRecordingPlayerTab.DOCTOR) { - include = true - } - - // EVENTS - if (item.type === SessionRecordingPlayerTab.EVENTS) { - if ( - tab === SessionRecordingPlayerTab.DOCTOR && - (item.data.event === '$exception' || item.data.event.toLowerCase().includes('error')) - ) { - include = true - } - - if (tab !== SessionRecordingPlayerTab.EVENTS && tab !== SessionRecordingPlayerTab.ALL) { - continue - } - - if (miniFiltersByKey['events-all']?.enabled || miniFiltersByKey['all-everything']?.enabled) { - include = true - } - if (miniFiltersByKey['events-posthog']?.enabled && isPostHogEvent(item)) { - include = true - } - // include Mobile events as part of the Auto-Summary - if (miniFiltersByKey['all-automatic']?.enabled && isMobileEvent(item)) { - include = true - } - if ( - (miniFiltersByKey['events-custom']?.enabled || - miniFiltersByKey['all-automatic']?.enabled) && - !isPostHogEvent(item) - ) { - include = true - } - if ( - (miniFiltersByKey['events-pageview']?.enabled || - miniFiltersByKey['all-automatic']?.enabled) && - ['$pageview', '$screen'].includes(item.data.event) - ) { - include = true - } - if ( - (miniFiltersByKey['events-autocapture']?.enabled || - miniFiltersByKey['all-automatic']?.enabled) && - item.data.event === '$autocapture' - ) { - include = true - } - - if ( - (miniFiltersByKey['all-errors']?.enabled || - miniFiltersByKey['events-exceptions']?.enabled) && - (item.data.event === '$exception' || item.data.event.toLowerCase().includes('error')) - ) { - include = true - } - - if (showMatchingEventsFilter && showOnlyMatching) { - // Special case - overrides the others - include = include && item.highlightColor === 'primary' - } - - if (windowIdFilter && item.data.properties?.$window_id !== windowIdFilter) { - include = false - } - } - - // CONSOLE LOGS - if (item.type === SessionRecordingPlayerTab.CONSOLE) { - if (tab === SessionRecordingPlayerTab.DOCTOR && item.data.level === 'error') { - include = true - } - - if (tab !== SessionRecordingPlayerTab.CONSOLE && tab !== SessionRecordingPlayerTab.ALL) { - continue - } - - if (miniFiltersByKey['console-all']?.enabled || miniFiltersByKey['all-everything']?.enabled) { - include = true - } - if (miniFiltersByKey['console-info']?.enabled && ['log', 'info'].includes(item.data.level)) { - include = true - } - if ( - (miniFiltersByKey['console-warn']?.enabled || miniFiltersByKey['all-automatic']?.enabled) && - item.data.level === 'warn' - ) { - include = true - } - if ( - (miniFiltersByKey['console-error']?.enabled || - miniFiltersByKey['all-errors']?.enabled || - miniFiltersByKey['all-automatic']?.enabled) && - item.data.level === 'error' - ) { - include = true - } - - if (windowIdFilter && item.data.windowId !== windowIdFilter) { - include = false - } - } - - // NETWORK - if (item.type === SessionRecordingPlayerTab.NETWORK) { - if (tab !== SessionRecordingPlayerTab.NETWORK && tab !== SessionRecordingPlayerTab.ALL) { - continue - } - - const responseStatus = item.data.response_status || 200 - const responseTime = item.data.duration || 0 - - if ( - miniFiltersByKey['performance-all']?.enabled || - miniFiltersByKey['all-everything']?.enabled - ) { - include = true - } - if ( - (miniFiltersByKey['performance-document']?.enabled || - miniFiltersByKey['all-automatic']?.enabled) && - ['navigation'].includes(item.data.entry_type || '') - ) { - include = true - } - if ( - miniFiltersByKey['performance-fetch']?.enabled && - item.data.entry_type === 'resource' && - ['fetch', 
'xmlhttprequest'].includes(item.data.initiator_type || '') - ) { - include = true - } - - if ( - miniFiltersByKey['performance-assets-js']?.enabled && - item.data.entry_type === 'resource' && - (item.data.initiator_type === 'script' || - (['link', 'other'].includes(item.data.initiator_type || '') && - item.data.name?.includes('.js'))) - ) { - include = true - } - - if ( - miniFiltersByKey['performance-assets-css']?.enabled && - item.data.entry_type === 'resource' && - (item.data.initiator_type === 'css' || - (['link', 'other'].includes(item.data.initiator_type || '') && - item.data.name?.includes('.css'))) - ) { - include = true - } - - if ( - miniFiltersByKey['performance-assets-img']?.enabled && - item.data.entry_type === 'resource' && - (item.data.initiator_type === 'img' || - (['link', 'other'].includes(item.data.initiator_type || '') && - !!IMAGE_WEB_EXTENSIONS.some((ext) => item.data.name?.includes(`.${ext}`)))) - ) { - include = true - } - - if ( - miniFiltersByKey['performance-other']?.enabled && - item.data.entry_type === 'resource' && - ['other'].includes(item.data.initiator_type || '') && - ![...IMAGE_WEB_EXTENSIONS, 'css', 'js'].some((ext) => item.data.name?.includes(`.${ext}`)) - ) { - include = true - } - - if ( - (miniFiltersByKey['all-errors']?.enabled || miniFiltersByKey['all-automatic']?.enabled) && - responseStatus >= 400 - ) { - include = true - } - - if (miniFiltersByKey['all-automatic']?.enabled && responseTime >= 1000) { - include = true - } - - if (windowIdFilter && item.data.window_id !== windowIdFilter) { - include = false - } - - if (item.data.entry_type === 'paint') { - // We don't include paint events as they are covered in the navigation events - include = false - } - } - - if (!include) { - continue - } - - items.push(item) - } - - return items + return filterInspectorListItems({ + allItems, + tab, + miniFiltersByKey, + showMatchingEventsFilter, + showOnlyMatching, + windowIdFilter, + }) }, ], seekbarItems: [ - (s) => [s.allItems, s.showOnlyMatching, s.showMatchingEventsFilter], - (allItems, showOnlyMatching, showMatchingEventsFilter): InspectorListItemEvent[] => { + (s) => [s.allItems, s.showOnlyMatching, s.showSeekbarTicks, s.showMatchingEventsFilter], + (allItems, showOnlyMatching, showSeekbarTicks, showMatchingEventsFilter): InspectorListItemEvent[] => { let items = allItems.filter((item) => { if (item.type !== SessionRecordingPlayerTab.EVENTS) { return false } - if (showMatchingEventsFilter && showOnlyMatching && item.highlightColor !== 'primary') { + if (!showSeekbarTicks && ['$pageview', '$screen'].includes(item.data.event)) { return false } - return true + return !(showMatchingEventsFilter && showOnlyMatching && item.highlightColor !== 'primary') }) as InspectorListItemEvent[] if (items.length > MAX_SEEKBAR_ITEMS) { @@ -822,11 +595,7 @@ export const playerInspectorLogic = kea([ }) items = items.filter((_, i) => { - if (i % Math.ceil(items.length / MAX_SEEKBAR_ITEMS) === 0) { - return true - } - - return false + return i % Math.ceil(items.length / MAX_SEEKBAR_ITEMS) === 0 }) } diff --git a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts index 7de31840ebca2..96021bb74ba16 100644 --- a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts +++ b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts @@ -203,6 +203,8 @@ export const playerSettingsLogic = kea([ setTimestampFormat: (format: TimestampFormat) => ({ format }), 
setPreferredInspectorStacking: (stacking: InspectorStacking) => ({ stacking }), setPlaybackViewMode: (mode: PlaybackViewMode) => ({ mode }), + setShowMouseTail: (showMouseTail: boolean) => ({ showMouseTail }), + setShowSeekbarTicks: (show: boolean) => ({ show }), }), connect({ values: [teamLogic, ['currentTeam']], @@ -289,6 +291,20 @@ export const playerSettingsLogic = kea([ setHideViewedRecordings: (_, { hideViewedRecordings }) => hideViewedRecordings, }, ], + showMouseTail: [ + true, + { persist: true }, + { + setShowMouseTail: (_, { showMouseTail }) => showMouseTail, + }, + ], + showSeekbarTicks: [ + true, + { persist: true }, + { + setShowSeekbarTicks: (_, { show }) => show, + }, + ], // Inspector tab: [ diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts index 8dfeb641055ff..dd56272db691a 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts @@ -113,7 +113,7 @@ export const sessionRecordingPlayerLogic = kea( 'fullyLoaded', ], playerSettingsLogic, - ['speed', 'skipInactivitySetting'], + ['speed', 'skipInactivitySetting', 'showMouseTail'], userLogic, ['user', 'hasAvailableFeature'], preflightLogic, @@ -549,7 +549,7 @@ export const sessionRecordingPlayerLogic = kea( ...COMMON_REPLAYER_CONFIG, // these two settings are attempts to improve performance of running two Replayers at once // the main player and a preview player - mouseTail: props.mode !== SessionRecordingPlayerMode.Preview, + mouseTail: values.showMouseTail && props.mode !== SessionRecordingPlayerMode.Preview, useVirtualDom: false, plugins, onError: (error) => { diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistSettings.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistSettings.tsx index 972628dd0032b..007319a686f45 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistSettings.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistSettings.tsx @@ -1,44 +1,17 @@ -import { LemonSelect, LemonSwitch } from '@posthog/lemon-ui' +import { LemonSwitch } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' -import { Tooltip } from 'lib/lemon-ui/Tooltip' import { DurationTypeSelect } from 'scenes/session-recordings/filters/DurationTypeSelect' import { playerSettingsLogic } from '../player/playerSettingsLogic' import { sessionRecordingsPlaylistLogic } from './sessionRecordingsPlaylistLogic' export function SessionRecordingsPlaylistSettings(): JSX.Element { - const { autoplayDirection, durationTypeToShow, hideViewedRecordings } = useValues(playerSettingsLogic) - const { setAutoplayDirection, setDurationTypeToShow, setHideViewedRecordings } = useActions(playerSettingsLogic) + const { durationTypeToShow, hideViewedRecordings } = useValues(playerSettingsLogic) + const { setDurationTypeToShow, setHideViewedRecordings } = useActions(playerSettingsLogic) const { orderBy } = useValues(sessionRecordingsPlaylistLogic) return (
- - Autoplay next recording -
({!autoplayDirection ? 'off' : autoplayDirection}) -
- } - placement="right" - > -
- Autoplay - - -
-
Hide viewed { results: ['Recordings filtered by date'], }, ] - } else if (JSON.parse(searchParams.get('session_recording_duration') ?? '{}')['value'] === 600) { + } else if ( + (searchParams.get('having_predicates')?.length || 0) > 0 && + JSON.parse(searchParams.get('having_predicates') || '[]')[0]['value'] === 600 + ) { return [ 200, { @@ -611,8 +614,8 @@ describe('sessionRecordingsPlaylistLogic', () => { type: FilterLogicalOperator.And, values: [ { - type: PropertyFilterType.Recording, - key: 'console_log_level', + type: PropertyFilterType.LogEntry, + key: 'level', operator: PropertyOperator.IContains, value: ['warn', 'error'], }, @@ -634,8 +637,8 @@ describe('sessionRecordingsPlaylistLogic', () => { type: FilterLogicalOperator.And, values: [ { - type: PropertyFilterType.Recording, - key: 'console_log_query', + type: PropertyFilterType.LogEntry, + key: 'message', operator: PropertyOperator.Exact, value: 'this is a test', }, @@ -668,8 +671,8 @@ describe('sessionRecordingsPlaylistLogic', () => { type: FilterLogicalOperator.And, values: [ { - type: PropertyFilterType.Recording, - key: 'console_log_level', + type: PropertyFilterType.LogEntry, + key: 'level', operator: PropertyOperator.IContains, value: ['warn', 'error'], }, @@ -707,9 +710,9 @@ describe('sessionRecordingsPlaylistLogic', () => { }) }) - describe('convertUniversalFiltersToLegacyFilters', () => { + describe('convertUniversalFiltersToRecordingsQuery', () => { it('expands the visited_page filter to a pageview with $current_url property', () => { - const result = convertUniversalFiltersToLegacyFilters({ + const result = convertUniversalFiltersToRecordingsQuery({ ...DEFAULT_RECORDING_FILTERS, filter_group: { type: FilterLogicalOperator.And, @@ -808,15 +811,15 @@ describe('sessionRecordingsPlaylistLogic', () => { { key: 'email', value: ['email@posthog.com'], operator: 'exact', type: 'person' }, { key: 'email', value: ['test@posthog.com'], operator: 'exact', type: 'person' }, { - key: 'console_log_level', + key: 'level', operator: 'exact', - type: 'recording', + type: 'log_entry', value: ['info', 'warn'], }, { - key: 'console_log_query', + key: 'message', operator: 'exact', - type: 'recording', + type: 'log_entry', value: ['this is a query log'], }, ], diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts index 644fa16ab97c0..12336da783084 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts @@ -6,27 +6,29 @@ import { subscriptions } from 'kea-subscriptions' import api from 'lib/api' import { isAnyPropertyfilter } from 'lib/components/PropertyFilters/utils' import { DEFAULT_UNIVERSAL_GROUP_FILTER } from 'lib/components/UniversalFilters/universalFiltersLogic' -import { isActionFilter, isEventFilter, isRecordingPropertyFilter } from 'lib/components/UniversalFilters/utils' +import { + isActionFilter, + isEventFilter, + isLogEntryPropertyFilter, + isRecordingPropertyFilter, +} from 'lib/components/UniversalFilters/utils' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { objectClean } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import posthog from 'posthog-js' +import { NodeKind, RecordingsQuery, RecordingsQueryResponse } from '~/queries/schema' import { - AnyPropertyFilter, - DurationType, EntityTypes, - FilterableLogLevel, 
FilterLogicalOperator, FilterType, + LegacyRecordingFilters, + LogEntryPropertyFilter, PropertyFilterType, PropertyOperator, RecordingDurationFilter, - RecordingFilters, - RecordingPropertyFilter, RecordingUniversalFilters, SessionRecordingId, - SessionRecordingsResponse, SessionRecordingType, } from '~/types' @@ -36,12 +38,11 @@ import { sessionRecordingsListPropertiesLogic } from './sessionRecordingsListPro import type { sessionRecordingsPlaylistLogicType } from './sessionRecordingsPlaylistLogicType' export type PersonUUID = string -export type SessionOrderingType = DurationType | 'start_time' | 'console_error_count' interface Params { filters?: RecordingUniversalFilters - simpleFilters?: RecordingFilters - advancedFilters?: RecordingFilters + simpleFilters?: LegacyRecordingFilters + advancedFilters?: LegacyRecordingFilters sessionRecordingId?: SessionRecordingId } @@ -65,7 +66,7 @@ interface BackendEventsMatching { } export type MatchingEventsMatchType = NoEventsToMatch | EventNamesMatching | EventUUIDsMatching | BackendEventsMatching -export type SimpleFiltersType = Pick +export type SimpleFiltersType = Pick export const RECORDINGS_LIMIT = 20 export const PINNED_RECORDINGS_LIMIT = 100 // NOTE: This is high but avoids the need for pagination for now... @@ -94,7 +95,7 @@ export const getDefaultFilters = (personUUID?: PersonUUID): RecordingUniversalFi return personUUID ? DEFAULT_PERSON_RECORDING_FILTERS : DEFAULT_RECORDING_FILTERS } -const capturePartialFilters = (filters: Partial): void => { +const capturePartialFilters = (filters: Partial): void => { // capture only the partial filters applied (not the full filters object) // take each key from the filter and change it to `partial_filter_chosen_${key}` const partialFilters = Object.keys(filters).reduce((acc, key) => { @@ -107,33 +108,31 @@ const capturePartialFilters = (filters: Partial): void => { }) } -export function convertUniversalFiltersToLegacyFilters(universalFilters: RecordingUniversalFilters): RecordingFilters { +export function convertUniversalFiltersToRecordingsQuery(universalFilters: RecordingUniversalFilters): RecordingsQuery { const filters = filtersFromUniversalFilterGroups(universalFilters) - const properties: AnyPropertyFilter[] = [] - const events: FilterType['events'] = [] - const actions: FilterType['actions'] = [] - let console_logs: FilterableLogLevel[] = [] - let snapshot_source: AnyPropertyFilter | null = null - let console_search_query = '' + const events: RecordingsQuery['events'] = [] + const actions: RecordingsQuery['actions'] = [] + const properties: RecordingsQuery['properties'] = [] + const console_log_filters: RecordingsQuery['console_log_filters'] = [] + const having_predicates: RecordingsQuery['having_predicates'] = [] + + const durationFilter = universalFilters.duration[0] + + if (durationFilter) { + having_predicates.push(durationFilter) + } filters.forEach((f) => { if (isEventFilter(f)) { events.push(f) } else if (isActionFilter(f)) { actions.push(f) + } else if (isLogEntryPropertyFilter(f)) { + console_log_filters.push(f) } else if (isAnyPropertyfilter(f)) { - if (f.type === PropertyFilterType.Recording) { - if (f.key === 'console_log_level') { - console_logs = f.value as FilterableLogLevel[] - } else if (f.key === 'console_log_query') { - console_search_query = (f.value || '') as string - } else if (f.key === 'snapshot_source') { - const value = f.value as string[] | null - if (value) { - snapshot_source = f - } - } else if (f.key === 'visited_page') { + if (isRecordingPropertyFilter(f)) { + if 
(f.key === 'visited_page') { events.push({ id: '$pageview', name: '$pageview', @@ -147,6 +146,8 @@ export function convertUniversalFiltersToLegacyFilters(universalFilters: Recordi }, ], }) + } else if (f.key === 'snapshot_source' && f.value) { + having_predicates.push(f) } } else { properties.push(f) @@ -154,49 +155,48 @@ export function convertUniversalFiltersToLegacyFilters(universalFilters: Recordi } }) - const durationFilter = universalFilters.duration[0] - return { - ...universalFilters, + kind: NodeKind.RecordingsQuery, + order: 'start_time', + date_from: universalFilters.date_from, + date_to: universalFilters.date_to, properties, events, actions, - session_recording_duration: { ...durationFilter, key: 'duration' }, - duration_type_filter: durationFilter.key, - console_search_query, - console_logs, - snapshot_source, + console_log_filters, + having_predicates, + filter_test_accounts: universalFilters.filter_test_accounts, operand: universalFilters.filter_group.type, } } export function convertLegacyFiltersToUniversalFilters( - simpleFilters?: RecordingFilters, - advancedFilters?: RecordingFilters + simpleFilters?: LegacyRecordingFilters, + advancedFilters?: LegacyRecordingFilters ): RecordingUniversalFilters { - const filters = combineRecordingFilters(simpleFilters || {}, advancedFilters || {}) + const filters = combineLegacyRecordingFilters(simpleFilters || {}, advancedFilters || {}) const events = filters.events ?? [] const actions = filters.actions ?? [] const properties = filters.properties ?? [] - const logLevelFilters: RecordingPropertyFilter[] = + const logLevelFilters: LogEntryPropertyFilter[] = filters.console_logs && filters.console_logs.length > 0 ? [ { - key: 'console_log_level', + key: 'level', value: filters.console_logs, operator: PropertyOperator.Exact, - type: PropertyFilterType.Recording, + type: PropertyFilterType.LogEntry, }, ] : [] - const logQueryFilters: RecordingPropertyFilter[] = filters.console_search_query + const logQueryFilters: LogEntryPropertyFilter[] = filters.console_search_query ? 
[ { - key: 'console_log_query', + key: 'message', value: [filters.console_search_query], operator: PropertyOperator.Exact, - type: PropertyFilterType.Recording, + type: PropertyFilterType.LogEntry, }, ] : [] @@ -223,7 +223,10 @@ export function convertLegacyFiltersToUniversalFilters( } } -function combineRecordingFilters(simpleFilters: RecordingFilters, advancedFilters: RecordingFilters): RecordingFilters { +function combineLegacyRecordingFilters( + simpleFilters: LegacyRecordingFilters, + advancedFilters: LegacyRecordingFilters +): LegacyRecordingFilters { return { ...advancedFilters, events: [...(simpleFilters?.events || []), ...(advancedFilters?.events || [])], @@ -274,7 +277,7 @@ export const sessionRecordingsPlaylistLogic = kea) => ({ filters }), setShowFilters: (showFilters: boolean) => ({ showFilters }), setShowSettings: (showSettings: boolean) => ({ showSettings }), - setOrderBy: (orderBy: SessionOrderingType) => ({ orderBy }), + setOrderBy: (orderBy: RecordingsQuery['order']) => ({ orderBy }), resetFilters: true, setSelectedRecordingId: (id: SessionRecordingType['id'] | null) => ({ id, @@ -312,7 +315,8 @@ export const sessionRecordingsPlaylistLogic = kea, { loadEventsHaveSessionId: async () => { - const events: FilterType['events'] = convertUniversalFiltersToLegacyFilters(values.filters).events + const filters = filtersFromUniversalFilterGroups(values.filters) + const events: FilterType['events'] = filters.filter(isEventFilter) if (events === undefined || events.length === 0) { return {} @@ -329,33 +333,31 @@ export const sessionRecordingsPlaylistLogic = kea { - const params = { - // TODO: requires a backend change so will include in a separate PR - ...convertUniversalFiltersToLegacyFilters(values.filters), + const params: RecordingsQuery = { + ...convertUniversalFiltersToRecordingsQuery(values.filters), person_uuid: props.personUUID ?? 
'', - target_entity_order: values.orderBy, + order: values.orderBy, limit: RECORDINGS_LIMIT, } if (values.orderBy === 'start_time') { if (direction === 'older') { - params['date_to'] = - values.sessionRecordings[values.sessionRecordings.length - 1]?.start_time + params.date_to = values.sessionRecordings[values.sessionRecordings.length - 1]?.start_time } if (direction === 'newer') { - params['date_from'] = values.sessionRecordings[0]?.start_time + params.date_from = values.sessionRecordings[0]?.start_time } } else { if (direction === 'older') { - params['offset'] = values.sessionRecordings.length + params.offset = values.sessionRecordings.length } if (direction === 'newer') { - params['offset'] = 0 + params.offset = 0 } } @@ -396,7 +398,9 @@ export const sessionRecordingsPlaylistLogic = kea ({ orderBy: [ - 'start_time' as SessionOrderingType, + 'start_time' as RecordingsQuery['order'], { persist: true }, { setOrderBy: (_, { orderBy }) => orderBy, diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts index a40d50e1f49b9..7aea380bae5d6 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistSceneLogic.ts @@ -18,7 +18,7 @@ import { urls } from 'scenes/urls' import { cohortsModel } from '~/models/cohortsModel' import { Breadcrumb, - RecordingFilters, + LegacyRecordingFilters, RecordingUniversalFilters, ReplayTabs, SessionRecordingPlaylistType, @@ -46,7 +46,7 @@ export const sessionRecordingsPlaylistSceneLogic = kea ({ filters }), + setFilters: (filters: LegacyRecordingFilters | RecordingUniversalFilters | null) => ({ filters }), loadPinnedRecordings: true, onPinnedChange: (recording: SessionRecordingType, pinned: boolean) => ({ pinned, recording }), }), @@ -116,7 +116,7 @@ export const sessionRecordingsPlaylistSceneLogic = kea ({ filters: [ - null as RecordingFilters | RecordingUniversalFilters | null, + null as LegacyRecordingFilters | RecordingUniversalFilters | null, { getPlaylistSuccess: (_, { playlist }) => playlist?.filters || null, updatePlaylistSuccess: (_, { playlist }) => playlist?.filters || null, diff --git a/frontend/src/scenes/session-recordings/utils.ts b/frontend/src/scenes/session-recordings/utils.ts index 9dfb2eab13664..273152331b5a4 100644 --- a/frontend/src/scenes/session-recordings/utils.ts +++ b/frontend/src/scenes/session-recordings/utils.ts @@ -1,9 +1,9 @@ import { UniversalFiltersGroup, UniversalFilterValue } from 'lib/components/UniversalFilters/UniversalFilters' -import { RecordingFilters, RecordingUniversalFilters } from '~/types' +import { LegacyRecordingFilters, RecordingUniversalFilters } from '~/types' export const isUniversalFilters = ( - filters: RecordingUniversalFilters | RecordingFilters + filters: RecordingUniversalFilters | LegacyRecordingFilters ): filters is RecordingUniversalFilters => { return 'filter_group' in filters } diff --git a/frontend/src/scenes/settings/project/BounceRatePageViewMode.tsx b/frontend/src/scenes/settings/project/BounceRatePageViewMode.tsx index 47a45d0243615..133f1869a44e5 100644 --- a/frontend/src/scenes/settings/project/BounceRatePageViewMode.tsx +++ b/frontend/src/scenes/settings/project/BounceRatePageViewMode.tsx @@ -33,6 +33,18 @@ const bounceRatePageViewModeOptions: LemonRadioOption[] ), }, + { + value: 'uniq_page_screen_autocaptures', + label: ( + <> +
Use uniqUpTo
+
+ Uses the uniqUpTo function to count if the total unique pageviews + screen events + + autocaptures is >= 2
+ + ), + }, ] export function BounceRatePageViewModeSetting(): JSX.Element { diff --git a/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx b/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx index 0fbdecb949d35..9866e42c4cf56 100644 --- a/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx +++ b/frontend/src/scenes/trends/viz/ActionsHorizontalBar.tsx @@ -48,12 +48,19 @@ export function ActionsHorizontalBar({ showPersonsModal = true }: ChartParams): personsValues: _data.map((item) => item.persons), breakdownValues: _data.map((item) => item.breakdown_value), breakdownLabels: _data.map((item) => { - return formatBreakdownLabel( + const itemLabel = item.action.custom_name ?? item.action.name ?? item.action.id + if (!item.breakdown_value) { + return itemLabel + } + + const breakdownLabel = formatBreakdownLabel( item.breakdown_value, breakdownFilter, cohorts, formatPropertyValueForDisplay ) + + return `${itemLabel} - ${breakdownLabel}` }), compareLabels: _data.map((item) => item.compare_label), backgroundColor: colorList, diff --git a/frontend/src/toolbar/elements/heatmapLogic.ts b/frontend/src/toolbar/elements/heatmapLogic.ts index e97b1bf998532..f297110587e1a 100644 --- a/frontend/src/toolbar/elements/heatmapLogic.ts +++ b/frontend/src/toolbar/elements/heatmapLogic.ts @@ -16,6 +16,7 @@ import { } from 'lib/components/heatmaps/types' import { calculateViewportRange, DEFAULT_HEATMAP_FILTERS } from 'lib/components/heatmaps/utils' import { dateFilterToText } from 'lib/utils' +import { createVersionChecker } from 'lib/utils/semver' import { PostHog } from 'posthog-js' import { collectAllElementsDeep, querySelectorAllDeep } from 'query-selector-shadow-dom' @@ -28,7 +29,7 @@ import { FilterType, PropertyFilterType, PropertyOperator } from '~/types' import type { heatmapLogicType } from './heatmapLogicType' -export const SCROLL_DEPTH_JS_VERSION = [1, 99] +export const doesVersionSupportScrollDepth = createVersionChecker('1.99') const emptyElementsStatsPages: PaginatedResponse = { next: undefined, @@ -438,18 +439,15 @@ export const heatmapLogic = kea([ (s) => [s.posthog], (posthog: PostHog): 'version' | 'disabled' | null => { const posthogVersion = - posthog?._calculate_event_properties('test', {}, new Date())?.['$lib_version'] ?? '0.0.0' - const majorMinorVersion = posthogVersion.split('.') - const majorVersion = parseInt(majorMinorVersion[0], 10) - const minorVersion = parseInt(majorMinorVersion[1], 10) + posthog?.version ?? + posthog?._calculate_event_properties('test', {}, new Date())?.['$lib_version'] ?? + '0.0.0' if (!(posthog as any)?.scrollManager?.scrollY) { return 'version' } - const isSupported = - majorVersion > SCROLL_DEPTH_JS_VERSION[0] || - (majorVersion === SCROLL_DEPTH_JS_VERSION[0] && minorVersion >= SCROLL_DEPTH_JS_VERSION[1]) + const isSupported = doesVersionSupportScrollDepth(posthogVersion) const isDisabled = posthog?.config.disable_scroll_properties return !isSupported ? 'version' : isDisabled ? 
'disabled' : null diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 9e1048d9eeb52..7bdb1a9b8b306 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -33,7 +33,6 @@ import { Scene } from 'scenes/sceneTypes' import { QueryContext } from '~/queries/types' import type { - AnomalyCondition, DashboardFilter, DatabaseSchemaField, HogQLQuery, @@ -702,6 +701,7 @@ export enum PropertyFilterType { Session = 'session', Cohort = 'cohort', Recording = 'recording', + LogEntry = 'log_entry', Group = 'group', HogQL = 'hogql', DataWarehouse = 'data_warehouse', @@ -789,6 +789,7 @@ export type AnyPropertyFilter = | SessionPropertyFilter | CohortPropertyFilter | RecordingPropertyFilter + | LogEntryPropertyFilter | GroupPropertyFilter | FeaturePropertyFilter | HogQLPropertyFilter @@ -959,7 +960,7 @@ export type ActionStepProperties = export interface RecordingPropertyFilter extends BasePropertyFilter { type: PropertyFilterType.Recording - key: DurationType | 'console_log_level' | 'console_log_query' | 'snapshot_source' | 'visited_page' + key: DurationType | 'snapshot_source' | 'visited_page' operator: PropertyOperator } @@ -968,11 +969,29 @@ export interface RecordingDurationFilter extends RecordingPropertyFilter { value: number } -export type DurationType = 'duration' | 'active_seconds' | 'inactive_seconds' +export interface LogEntryPropertyFilter extends BasePropertyFilter { + type: PropertyFilterType.LogEntry + operator: PropertyOperator +} + +export interface LogEntryPropertyFilter extends BasePropertyFilter { + type: PropertyFilterType.LogEntry + operator: PropertyOperator +} + +export interface LogEntryLevelFilter extends LogEntryPropertyFilter { + key: 'level' + value: FilterableLogLevel[] +} +export interface LogEntryMessageFilter extends LogEntryPropertyFilter { + key: 'message' + value: string +} +export type DurationType = 'duration' | 'active_seconds' | 'inactive_seconds' export type FilterableLogLevel = 'info' | 'warn' | 'error' -export interface RecordingFilters { +export interface LegacyRecordingFilters { date_from?: string | null date_to?: string | null events?: FilterType['events'] @@ -980,9 +999,9 @@ export interface RecordingFilters { properties?: AnyPropertyFilter[] session_recording_duration?: RecordingDurationFilter duration_type_filter?: DurationType - console_search_query?: string + console_search_query?: LogEntryMessageFilter['value'] + console_logs?: LogEntryLevelFilter['value'] snapshot_source?: AnyPropertyFilter | null - console_logs?: FilterableLogLevel[] filter_test_accounts?: boolean operand?: FilterLogicalOperator } @@ -995,11 +1014,6 @@ export interface RecordingUniversalFilters { filter_group: UniversalFiltersGroup } -export interface SessionRecordingsResponse { - results: SessionRecordingType[] - has_next: boolean -} - export type ErrorCluster = { cluster: number sample: string @@ -1309,7 +1323,7 @@ export interface SessionRecordingPlaylistType { created_by: UserBasicType | null last_modified_at: string last_modified_by: UserBasicType | null - filters?: RecordingFilters + filters?: LegacyRecordingFilters } export interface SessionRecordingSegmentType { @@ -1840,6 +1854,7 @@ export interface PluginType { metrics?: Record capabilities?: Record<'jobs' | 'methods' | 'scheduled_tasks', string[] | undefined> public_jobs?: Record + hog_function_migration_available?: boolean } export type AppType = PluginType @@ -4436,14 +4451,6 @@ export type HogFunctionInvocationGlobals = { > } -export interface AlertType { - id: number - name: string - insight?: number 
- target_value: string - anomaly_condition: AnomalyCondition -} - export type AppMetricsV2Response = { labels: string[] series: { diff --git a/hogql_parser/HogQLLexer.cpp b/hogql_parser/HogQLLexer.cpp index f5583b7fa4ac8..8c8c7c5a2b643 100644 --- a/hogql_parser/HogQLLexer.cpp +++ b/hogql_parser/HogQLLexer.cpp @@ -1,5 +1,5 @@ -// Generated from HogQLLexer.g4 by ANTLR 4.13.1 +// Generated from HogQLLexer.g4 by ANTLR 4.13.2 #include "HogQLLexer.h" @@ -45,7 +45,7 @@ ::antlr4::internal::OnceFlag hogqllexerLexerOnceFlag; #if ANTLR4_USE_THREAD_LOCAL_CACHE static thread_local #endif -HogQLLexerStaticData *hogqllexerLexerStaticData = nullptr; +std::unique_ptr hogqllexerLexerStaticData = nullptr; void hogqllexerLexerInitialize() { #if ANTLR4_USE_THREAD_LOCAL_CACHE @@ -673,7 +673,7 @@ void hogqllexerLexerInitialize() { for (size_t i = 0; i < count; i++) { staticData->decisionToDFA.emplace_back(staticData->atn->getDecisionState(i), i); } - hogqllexerLexerStaticData = staticData.release(); + hogqllexerLexerStaticData = std::move(staticData); } } diff --git a/hogql_parser/HogQLLexer.h b/hogql_parser/HogQLLexer.h index b54914ebc913e..d537cee2ba0cb 100644 --- a/hogql_parser/HogQLLexer.h +++ b/hogql_parser/HogQLLexer.h @@ -1,5 +1,5 @@ -// Generated from HogQLLexer.g4 by ANTLR 4.13.1 +// Generated from HogQLLexer.g4 by ANTLR 4.13.2 #pragma once diff --git a/hogql_parser/HogQLParser.cpp b/hogql_parser/HogQLParser.cpp index e2021ef8c06ae..55a2fca242632 100644 --- a/hogql_parser/HogQLParser.cpp +++ b/hogql_parser/HogQLParser.cpp @@ -1,5 +1,5 @@ -// Generated from HogQLParser.g4 by ANTLR 4.13.1 +// Generated from HogQLParser.g4 by ANTLR 4.13.2 #include "HogQLParserVisitor.h" @@ -40,7 +40,7 @@ ::antlr4::internal::OnceFlag hogqlparserParserOnceFlag; #if ANTLR4_USE_THREAD_LOCAL_CACHE static thread_local #endif -HogQLParserStaticData *hogqlparserParserStaticData = nullptr; +std::unique_ptr hogqlparserParserStaticData = nullptr; void hogqlparserParserInitialize() { #if ANTLR4_USE_THREAD_LOCAL_CACHE @@ -64,13 +64,12 @@ void hogqlparserParserInitialize() { "orderExpr", "ratioExpr", "settingExprList", "settingExpr", "windowExpr", "winPartitionByClause", "winOrderByClause", "winFrameClause", "winFrameExtend", "winFrameBound", "expr", "columnTypeExpr", "columnExprList", "columnExpr", - "columnArgList", "columnArgExpr", "columnLambdaExpr", "hogqlxTagElement", - "hogqlxTagAttribute", "withExprList", "withExpr", "columnIdentifier", - "nestedIdentifier", "tableExpr", "tableFunctionExpr", "tableIdentifier", - "tableArgList", "databaseIdentifier", "floatingLiteral", "numberLiteral", - "literal", "interval", "keyword", "keywordForAlias", "alias", "identifier", - "enumValue", "placeholder", "string", "templateString", "stringContents", - "fullTemplateString", "stringContentsFull" + "columnLambdaExpr", "hogqlxTagElement", "hogqlxTagAttribute", "withExprList", + "withExpr", "columnIdentifier", "nestedIdentifier", "tableExpr", "tableFunctionExpr", + "tableIdentifier", "tableArgList", "databaseIdentifier", "floatingLiteral", + "numberLiteral", "literal", "interval", "keyword", "keywordForAlias", + "alias", "identifier", "enumValue", "placeholder", "string", "templateString", + "stringContents", "fullTemplateString", "stringContentsFull" }, std::vector{ "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", @@ -114,7 +113,7 @@ void hogqlparserParserInitialize() { } ); static const int32_t serializedATNSegment[] = { - 4,1,159,1311,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6, + 
4,1,159,1303,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6, 2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14, 7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20,7,20,2,21, 7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26,2,27,7,27,2,28, @@ -126,484 +125,481 @@ void hogqlparserParserInitialize() { 7,63,2,64,7,64,2,65,7,65,2,66,7,66,2,67,7,67,2,68,7,68,2,69,7,69,2,70, 7,70,2,71,7,71,2,72,7,72,2,73,7,73,2,74,7,74,2,75,7,75,2,76,7,76,2,77, 7,77,2,78,7,78,2,79,7,79,2,80,7,80,2,81,7,81,2,82,7,82,2,83,7,83,2,84, - 7,84,2,85,7,85,2,86,7,86,2,87,7,87,1,0,5,0,178,8,0,10,0,12,0,181,9,0, - 1,0,1,0,1,1,1,1,3,1,187,8,1,1,2,1,2,1,3,1,3,1,3,1,3,1,3,3,3,196,8,3,1, - 4,1,4,1,4,5,4,201,8,4,10,4,12,4,204,9,4,1,4,3,4,207,8,4,1,5,1,5,1,5,1, - 5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,3,5,221,8,5,1,6,1,6,3,6,225,8,6,1,6, - 3,6,228,8,6,1,7,1,7,3,7,232,8,7,1,7,3,7,235,8,7,1,8,1,8,1,8,1,8,1,8,3, - 8,242,8,8,1,8,1,8,3,8,246,8,8,1,8,1,8,1,9,1,9,1,9,5,9,253,8,9,10,9,12, - 9,256,9,9,1,9,1,9,3,9,260,8,9,1,10,1,10,1,10,1,10,1,10,1,10,1,10,3,10, - 269,8,10,1,11,1,11,1,11,1,11,1,11,1,11,3,11,277,8,11,1,12,1,12,1,12,1, - 12,1,12,3,12,284,8,12,1,12,1,12,3,12,288,8,12,1,12,1,12,1,12,1,12,3,12, - 294,8,12,1,12,1,12,1,12,3,12,299,8,12,1,13,1,13,1,13,1,13,1,13,1,13,3, - 13,307,8,13,1,13,1,13,1,13,1,13,1,13,3,13,314,8,13,1,14,1,14,1,14,1,14, - 3,14,320,8,14,1,14,1,14,1,14,1,15,1,15,1,15,1,15,1,15,1,16,1,16,3,16, - 332,8,16,1,17,1,17,1,18,1,18,5,18,338,8,18,10,18,12,18,341,9,18,1,18, - 1,18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,5,20,352,8,20,10,20,12,20,355, - 9,20,1,20,3,20,358,8,20,1,21,1,21,1,21,3,21,363,8,21,1,21,1,21,1,22,1, - 22,1,22,1,22,5,22,371,8,22,10,22,12,22,374,9,22,1,23,1,23,1,23,1,23,1, - 23,1,23,3,23,382,8,23,1,24,3,24,385,8,24,1,24,1,24,3,24,389,8,24,1,24, - 3,24,392,8,24,1,24,1,24,3,24,396,8,24,1,24,3,24,399,8,24,1,24,3,24,402, - 8,24,1,24,3,24,405,8,24,1,24,3,24,408,8,24,1,24,1,24,3,24,412,8,24,1, - 24,1,24,3,24,416,8,24,1,24,3,24,419,8,24,1,24,3,24,422,8,24,1,24,3,24, - 425,8,24,1,24,1,24,3,24,429,8,24,1,24,3,24,432,8,24,1,25,1,25,1,25,1, - 26,1,26,1,26,1,26,3,26,441,8,26,1,27,1,27,1,27,1,28,3,28,447,8,28,1,28, - 1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29, - 1,29,1,29,5,29,466,8,29,10,29,12,29,469,9,29,1,30,1,30,1,30,1,31,1,31, - 1,31,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,3,32,485,8,32,1,33,1,33, - 1,33,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,3,36, - 502,8,36,1,36,1,36,1,36,1,36,3,36,508,8,36,1,36,1,36,1,36,1,36,3,36,514, - 8,36,1,36,1,36,1,36,1,36,1,36,1,36,1,36,1,36,1,36,3,36,525,8,36,3,36, - 527,8,36,1,37,1,37,1,37,1,38,1,38,1,38,1,39,1,39,1,39,3,39,538,8,39,1, - 39,3,39,541,8,39,1,39,1,39,1,39,1,39,3,39,547,8,39,1,39,1,39,1,39,1,39, - 1,39,1,39,3,39,555,8,39,1,39,1,39,1,39,1,39,5,39,561,8,39,10,39,12,39, - 564,9,39,1,40,3,40,567,8,40,1,40,1,40,1,40,3,40,572,8,40,1,40,3,40,575, - 8,40,1,40,3,40,578,8,40,1,40,1,40,3,40,582,8,40,1,40,1,40,3,40,586,8, - 40,1,40,3,40,589,8,40,3,40,591,8,40,1,40,3,40,594,8,40,1,40,1,40,3,40, - 598,8,40,1,40,1,40,3,40,602,8,40,1,40,3,40,605,8,40,3,40,607,8,40,3,40, - 609,8,40,1,41,1,41,1,41,3,41,614,8,41,1,42,1,42,1,42,1,42,1,42,1,42,1, - 42,1,42,1,42,3,42,625,8,42,1,43,1,43,1,43,1,43,3,43,631,8,43,1,44,1,44, - 1,44,5,44,636,8,44,10,44,12,44,639,9,44,1,45,1,45,3,45,643,8,45,1,45, - 1,45,3,45,647,8,45,1,45,1,45,3,45,651,8,45,1,46,1,46,1,46,1,46,3,46,657, - 8,46,3,46,659,8,46,1,47,1,47,1,47,5,47,664,8,47,10,47,12,47,667,9,47, - 1,48,1,48,1,48,1,48,1,49,3,49,674,8,49,1,49,3,49,677,8,49,1,49,3,49,680, - 
8,49,1,50,1,50,1,50,1,50,1,51,1,51,1,51,1,51,1,52,1,52,1,52,1,53,1,53, - 1,53,1,53,1,53,1,53,3,53,699,8,53,1,54,1,54,1,54,1,54,1,54,1,54,1,54, - 1,54,1,54,1,54,1,54,1,54,3,54,713,8,54,1,55,1,55,1,55,1,56,1,56,1,56, - 1,56,1,56,1,56,1,56,1,56,1,56,5,56,727,8,56,10,56,12,56,730,9,56,1,56, - 3,56,733,8,56,1,56,1,56,1,56,1,56,1,56,1,56,1,56,5,56,742,8,56,10,56, - 12,56,745,9,56,1,56,3,56,748,8,56,1,56,1,56,1,56,1,56,1,56,1,56,1,56, - 5,56,757,8,56,10,56,12,56,760,9,56,1,56,3,56,763,8,56,1,56,1,56,1,56, - 1,56,1,56,3,56,770,8,56,1,56,1,56,3,56,774,8,56,1,57,1,57,1,57,5,57,779, - 8,57,10,57,12,57,782,9,57,1,57,3,57,785,8,57,1,58,1,58,1,58,3,58,790, - 8,58,1,58,1,58,1,58,1,58,1,58,4,58,797,8,58,11,58,12,58,798,1,58,1,58, - 3,58,803,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, - 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,827,8,58, + 7,84,2,85,7,85,1,0,5,0,174,8,0,10,0,12,0,177,9,0,1,0,1,0,1,1,1,1,3,1, + 183,8,1,1,2,1,2,1,3,1,3,1,3,1,3,1,3,3,3,192,8,3,1,4,1,4,1,4,5,4,197,8, + 4,10,4,12,4,200,9,4,1,4,3,4,203,8,4,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1, + 5,1,5,1,5,1,5,3,5,217,8,5,1,6,1,6,3,6,221,8,6,1,6,3,6,224,8,6,1,7,1,7, + 3,7,228,8,7,1,7,3,7,231,8,7,1,8,1,8,1,8,1,8,1,8,3,8,238,8,8,1,8,1,8,3, + 8,242,8,8,1,8,1,8,1,9,1,9,1,9,5,9,249,8,9,10,9,12,9,252,9,9,1,9,1,9,3, + 9,256,8,9,1,10,1,10,1,10,1,10,1,10,1,10,1,10,3,10,265,8,10,1,11,1,11, + 1,11,1,11,1,11,1,11,3,11,273,8,11,1,12,1,12,1,12,1,12,1,12,3,12,280,8, + 12,1,12,1,12,3,12,284,8,12,1,12,1,12,1,12,1,12,3,12,290,8,12,1,12,1,12, + 1,12,3,12,295,8,12,1,13,1,13,1,13,1,13,1,13,1,13,3,13,303,8,13,1,13,1, + 13,1,13,1,13,1,13,3,13,310,8,13,1,14,1,14,1,14,1,14,3,14,316,8,14,1,14, + 1,14,1,14,1,15,1,15,1,15,1,15,1,15,1,16,1,16,3,16,328,8,16,1,17,1,17, + 1,18,1,18,5,18,334,8,18,10,18,12,18,337,9,18,1,18,1,18,1,19,1,19,1,19, + 1,19,1,20,1,20,1,20,5,20,348,8,20,10,20,12,20,351,9,20,1,20,3,20,354, + 8,20,1,21,1,21,1,21,3,21,359,8,21,1,21,1,21,1,22,1,22,1,22,1,22,5,22, + 367,8,22,10,22,12,22,370,9,22,1,23,1,23,1,23,1,23,1,23,1,23,3,23,378, + 8,23,1,24,3,24,381,8,24,1,24,1,24,3,24,385,8,24,1,24,3,24,388,8,24,1, + 24,1,24,3,24,392,8,24,1,24,3,24,395,8,24,1,24,3,24,398,8,24,1,24,3,24, + 401,8,24,1,24,3,24,404,8,24,1,24,1,24,3,24,408,8,24,1,24,1,24,3,24,412, + 8,24,1,24,3,24,415,8,24,1,24,3,24,418,8,24,1,24,3,24,421,8,24,1,24,1, + 24,3,24,425,8,24,1,24,3,24,428,8,24,1,25,1,25,1,25,1,26,1,26,1,26,1,26, + 3,26,437,8,26,1,27,1,27,1,27,1,28,3,28,443,8,28,1,28,1,28,1,28,1,28,1, + 29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,5,29,462, + 8,29,10,29,12,29,465,9,29,1,30,1,30,1,30,1,31,1,31,1,31,1,32,1,32,1,32, + 1,32,1,32,1,32,1,32,1,32,3,32,481,8,32,1,33,1,33,1,33,1,34,1,34,1,34, + 1,34,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,3,36,498,8,36,1,36,1,36, + 1,36,1,36,3,36,504,8,36,1,36,1,36,1,36,1,36,3,36,510,8,36,1,36,1,36,1, + 36,1,36,1,36,1,36,1,36,1,36,1,36,3,36,521,8,36,3,36,523,8,36,1,37,1,37, + 1,37,1,38,1,38,1,38,1,39,1,39,1,39,3,39,534,8,39,1,39,3,39,537,8,39,1, + 39,1,39,1,39,1,39,3,39,543,8,39,1,39,1,39,1,39,1,39,1,39,1,39,3,39,551, + 8,39,1,39,1,39,1,39,1,39,5,39,557,8,39,10,39,12,39,560,9,39,1,40,3,40, + 563,8,40,1,40,1,40,1,40,3,40,568,8,40,1,40,3,40,571,8,40,1,40,3,40,574, + 8,40,1,40,1,40,3,40,578,8,40,1,40,1,40,3,40,582,8,40,1,40,3,40,585,8, + 40,3,40,587,8,40,1,40,3,40,590,8,40,1,40,1,40,3,40,594,8,40,1,40,1,40, + 3,40,598,8,40,1,40,3,40,601,8,40,3,40,603,8,40,3,40,605,8,40,1,41,1,41, + 1,41,3,41,610,8,41,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,3,42, + 
621,8,42,1,43,1,43,1,43,1,43,3,43,627,8,43,1,44,1,44,1,44,5,44,632,8, + 44,10,44,12,44,635,9,44,1,45,1,45,3,45,639,8,45,1,45,1,45,3,45,643,8, + 45,1,45,1,45,3,45,647,8,45,1,46,1,46,1,46,1,46,3,46,653,8,46,3,46,655, + 8,46,1,47,1,47,1,47,5,47,660,8,47,10,47,12,47,663,9,47,1,48,1,48,1,48, + 1,48,1,49,3,49,670,8,49,1,49,3,49,673,8,49,1,49,3,49,676,8,49,1,50,1, + 50,1,50,1,50,1,51,1,51,1,51,1,51,1,52,1,52,1,52,1,53,1,53,1,53,1,53,1, + 53,1,53,3,53,695,8,53,1,54,1,54,1,54,1,54,1,54,1,54,1,54,1,54,1,54,1, + 54,1,54,1,54,3,54,709,8,54,1,55,1,55,1,55,1,56,1,56,1,56,1,56,1,56,1, + 56,1,56,1,56,1,56,5,56,723,8,56,10,56,12,56,726,9,56,1,56,3,56,729,8, + 56,1,56,1,56,1,56,1,56,1,56,1,56,1,56,5,56,738,8,56,10,56,12,56,741,9, + 56,1,56,3,56,744,8,56,1,56,1,56,1,56,1,56,1,56,1,56,1,56,5,56,753,8,56, + 10,56,12,56,756,9,56,1,56,3,56,759,8,56,1,56,1,56,1,56,1,56,1,56,3,56, + 766,8,56,1,56,1,56,3,56,770,8,56,1,57,1,57,1,57,5,57,775,8,57,10,57,12, + 57,778,9,57,1,57,3,57,781,8,57,1,58,1,58,1,58,3,58,786,8,58,1,58,1,58, + 1,58,1,58,1,58,4,58,793,8,58,11,58,12,58,794,1,58,1,58,3,58,799,8,58, 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, - 1,58,3,58,844,8,58,1,58,1,58,1,58,1,58,3,58,850,8,58,1,58,3,58,853,8, - 58,1,58,3,58,856,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,866, - 8,58,1,58,1,58,1,58,1,58,3,58,872,8,58,1,58,3,58,875,8,58,1,58,3,58,878, - 8,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,886,8,58,1,58,3,58,889,8,58,1, - 58,1,58,3,58,893,8,58,1,58,3,58,896,8,58,1,58,1,58,1,58,1,58,1,58,1,58, - 1,58,1,58,1,58,1,58,1,58,1,58,3,58,910,8,58,1,58,1,58,1,58,1,58,1,58, - 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,927,8,58,1,58, - 1,58,1,58,3,58,932,8,58,1,58,1,58,3,58,936,8,58,1,58,1,58,1,58,1,58,3, - 58,942,8,58,1,58,1,58,1,58,1,58,1,58,3,58,949,8,58,1,58,1,58,1,58,1,58, - 1,58,1,58,1,58,1,58,1,58,1,58,3,58,961,8,58,1,58,1,58,3,58,965,8,58,1, - 58,3,58,968,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,977,8,58,1,58, - 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,991,8,58, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,823,8,58,1,58,1,58,1,58, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,840, + 8,58,1,58,1,58,1,58,1,58,3,58,846,8,58,1,58,3,58,849,8,58,1,58,3,58,852, + 8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,862,8,58,1,58,1,58, + 1,58,1,58,3,58,868,8,58,1,58,3,58,871,8,58,1,58,3,58,874,8,58,1,58,1, + 58,1,58,1,58,1,58,1,58,3,58,882,8,58,1,58,3,58,885,8,58,1,58,1,58,3,58, + 889,8,58,1,58,3,58,892,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1, + 58,1,58,1,58,1,58,3,58,906,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1, + 58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,923,8,58,1,58,1,58,1,58,3, + 58,928,8,58,1,58,1,58,1,58,3,58,933,8,58,1,58,1,58,1,58,1,58,3,58,939, + 8,58,1,58,1,58,1,58,1,58,1,58,3,58,946,8,58,1,58,1,58,1,58,1,58,1,58, + 1,58,1,58,1,58,1,58,1,58,3,58,958,8,58,1,58,1,58,3,58,962,8,58,1,58,3, + 58,965,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,974,8,58,1,58,1,58, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,988,8,58,1,58, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58, + 1004,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, - 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, - 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,1030,8,58,1,58,1,58, - 1,58,1,58,1,58,1,58,3,58,1038,8,58,5,58,1040,8,58,10,58,12,58,1043,9, - 
58,1,59,1,59,1,59,5,59,1048,8,59,10,59,12,59,1051,9,59,1,59,3,59,1054, - 8,59,1,60,1,60,3,60,1058,8,60,1,61,1,61,1,61,1,61,5,61,1064,8,61,10,61, - 12,61,1067,9,61,1,61,3,61,1070,8,61,1,61,1,61,1,61,1,61,1,61,5,61,1077, - 8,61,10,61,12,61,1080,9,61,1,61,3,61,1083,8,61,3,61,1085,8,61,1,61,1, - 61,1,61,1,62,1,62,1,62,5,62,1093,8,62,10,62,12,62,1096,9,62,1,62,1,62, - 1,62,1,62,1,62,1,62,5,62,1104,8,62,10,62,12,62,1107,9,62,1,62,1,62,1, - 62,1,62,1,62,1,62,3,62,1115,8,62,1,62,1,62,1,62,1,62,1,62,3,62,1122,8, - 62,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63,3,63,1135,8, - 63,1,64,1,64,1,64,5,64,1140,8,64,10,64,12,64,1143,9,64,1,64,3,64,1146, - 8,64,1,65,1,65,1,65,1,65,1,65,1,65,1,65,1,65,1,65,1,65,3,65,1158,8,65, - 1,66,1,66,1,66,1,66,3,66,1164,8,66,1,66,3,66,1167,8,66,1,67,1,67,1,67, - 5,67,1172,8,67,10,67,12,67,1175,9,67,1,68,1,68,1,68,1,68,1,68,1,68,1, - 68,1,68,1,68,3,68,1186,8,68,1,68,1,68,1,68,1,68,3,68,1192,8,68,5,68,1194, - 8,68,10,68,12,68,1197,9,68,1,69,1,69,1,69,3,69,1202,8,69,1,69,1,69,1, - 70,1,70,1,70,3,70,1209,8,70,1,70,1,70,1,71,1,71,1,71,5,71,1216,8,71,10, - 71,12,71,1219,9,71,1,71,3,71,1222,8,71,1,72,1,72,1,73,1,73,1,73,1,73, - 1,73,1,73,3,73,1232,8,73,3,73,1234,8,73,1,74,3,74,1237,8,74,1,74,1,74, - 1,74,1,74,1,74,1,74,3,74,1245,8,74,1,75,1,75,1,75,3,75,1250,8,75,1,76, - 1,76,1,77,1,77,1,78,1,78,1,79,1,79,3,79,1260,8,79,1,80,1,80,1,80,3,80, - 1265,8,80,1,81,1,81,1,81,1,81,1,82,1,82,1,82,1,82,1,83,1,83,3,83,1277, - 8,83,1,84,1,84,5,84,1281,8,84,10,84,12,84,1284,9,84,1,84,1,84,1,85,1, - 85,1,85,1,85,1,85,3,85,1293,8,85,1,86,1,86,5,86,1297,8,86,10,86,12,86, - 1300,9,86,1,86,1,86,1,87,1,87,1,87,1,87,1,87,3,87,1309,8,87,1,87,0,3, - 78,116,136,88,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38, - 40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,76,78,80,82,84, - 86,88,90,92,94,96,98,100,102,104,106,108,110,112,114,116,118,120,122, - 124,126,128,130,132,134,136,138,140,142,144,146,148,150,152,154,156,158, - 160,162,164,166,168,170,172,174,0,16,2,0,18,18,74,74,2,0,44,44,51,51, - 3,0,1,1,4,4,8,8,4,0,1,1,3,4,8,8,80,80,2,0,51,51,73,73,2,0,1,1,4,4,2,0, - 7,7,22,23,2,0,30,30,49,49,2,0,71,71,76,76,3,0,10,10,50,50,90,90,2,0,41, - 41,53,53,1,0,107,108,2,0,118,118,139,139,7,0,21,21,38,38,55,56,70,70, - 78,78,97,97,103,103,16,0,1,13,15,20,22,28,30,30,32,37,39,42,44,51,53, - 54,58,58,60,69,71,77,79,83,85,92,94,96,98,99,101,102,4,0,20,20,30,30, - 39,39,48,48,1479,0,179,1,0,0,0,2,186,1,0,0,0,4,188,1,0,0,0,6,190,1,0, - 0,0,8,197,1,0,0,0,10,220,1,0,0,0,12,222,1,0,0,0,14,229,1,0,0,0,16,236, - 1,0,0,0,18,249,1,0,0,0,20,261,1,0,0,0,22,270,1,0,0,0,24,278,1,0,0,0,26, - 300,1,0,0,0,28,315,1,0,0,0,30,324,1,0,0,0,32,329,1,0,0,0,34,333,1,0,0, - 0,36,335,1,0,0,0,38,344,1,0,0,0,40,348,1,0,0,0,42,362,1,0,0,0,44,366, - 1,0,0,0,46,381,1,0,0,0,48,384,1,0,0,0,50,433,1,0,0,0,52,436,1,0,0,0,54, - 442,1,0,0,0,56,446,1,0,0,0,58,452,1,0,0,0,60,470,1,0,0,0,62,473,1,0,0, - 0,64,476,1,0,0,0,66,486,1,0,0,0,68,489,1,0,0,0,70,493,1,0,0,0,72,526, - 1,0,0,0,74,528,1,0,0,0,76,531,1,0,0,0,78,546,1,0,0,0,80,608,1,0,0,0,82, - 613,1,0,0,0,84,624,1,0,0,0,86,626,1,0,0,0,88,632,1,0,0,0,90,640,1,0,0, - 0,92,658,1,0,0,0,94,660,1,0,0,0,96,668,1,0,0,0,98,673,1,0,0,0,100,681, - 1,0,0,0,102,685,1,0,0,0,104,689,1,0,0,0,106,698,1,0,0,0,108,712,1,0,0, - 0,110,714,1,0,0,0,112,773,1,0,0,0,114,775,1,0,0,0,116,935,1,0,0,0,118, - 1044,1,0,0,0,120,1057,1,0,0,0,122,1084,1,0,0,0,124,1121,1,0,0,0,126,1134, - 1,0,0,0,128,1136,1,0,0,0,130,1157,1,0,0,0,132,1166,1,0,0,0,134,1168,1, - 
0,0,0,136,1185,1,0,0,0,138,1198,1,0,0,0,140,1208,1,0,0,0,142,1212,1,0, - 0,0,144,1223,1,0,0,0,146,1233,1,0,0,0,148,1236,1,0,0,0,150,1249,1,0,0, - 0,152,1251,1,0,0,0,154,1253,1,0,0,0,156,1255,1,0,0,0,158,1259,1,0,0,0, - 160,1264,1,0,0,0,162,1266,1,0,0,0,164,1270,1,0,0,0,166,1276,1,0,0,0,168, - 1278,1,0,0,0,170,1292,1,0,0,0,172,1294,1,0,0,0,174,1308,1,0,0,0,176,178, - 3,2,1,0,177,176,1,0,0,0,178,181,1,0,0,0,179,177,1,0,0,0,179,180,1,0,0, - 0,180,182,1,0,0,0,181,179,1,0,0,0,182,183,5,0,0,1,183,1,1,0,0,0,184,187, - 3,6,3,0,185,187,3,10,5,0,186,184,1,0,0,0,186,185,1,0,0,0,187,3,1,0,0, - 0,188,189,3,116,58,0,189,5,1,0,0,0,190,191,5,52,0,0,191,195,3,160,80, - 0,192,193,5,115,0,0,193,194,5,122,0,0,194,196,3,4,2,0,195,192,1,0,0,0, - 195,196,1,0,0,0,196,7,1,0,0,0,197,202,3,160,80,0,198,199,5,116,0,0,199, - 201,3,160,80,0,200,198,1,0,0,0,201,204,1,0,0,0,202,200,1,0,0,0,202,203, - 1,0,0,0,203,206,1,0,0,0,204,202,1,0,0,0,205,207,5,116,0,0,206,205,1,0, - 0,0,206,207,1,0,0,0,207,9,1,0,0,0,208,221,3,12,6,0,209,221,3,14,7,0,210, - 221,3,18,9,0,211,221,3,20,10,0,212,221,3,22,11,0,213,221,3,26,13,0,214, - 221,3,24,12,0,215,221,3,28,14,0,216,221,3,30,15,0,217,221,3,36,18,0,218, - 221,3,32,16,0,219,221,3,34,17,0,220,208,1,0,0,0,220,209,1,0,0,0,220,210, - 1,0,0,0,220,211,1,0,0,0,220,212,1,0,0,0,220,213,1,0,0,0,220,214,1,0,0, - 0,220,215,1,0,0,0,220,216,1,0,0,0,220,217,1,0,0,0,220,218,1,0,0,0,220, - 219,1,0,0,0,221,11,1,0,0,0,222,224,5,72,0,0,223,225,3,4,2,0,224,223,1, - 0,0,0,224,225,1,0,0,0,225,227,1,0,0,0,226,228,5,150,0,0,227,226,1,0,0, - 0,227,228,1,0,0,0,228,13,1,0,0,0,229,231,5,84,0,0,230,232,3,4,2,0,231, - 230,1,0,0,0,231,232,1,0,0,0,232,234,1,0,0,0,233,235,5,150,0,0,234,233, - 1,0,0,0,234,235,1,0,0,0,235,15,1,0,0,0,236,245,5,14,0,0,237,238,5,130, - 0,0,238,241,3,160,80,0,239,240,5,115,0,0,240,242,3,160,80,0,241,239,1, - 0,0,0,241,242,1,0,0,0,242,243,1,0,0,0,243,244,5,149,0,0,244,246,1,0,0, - 0,245,237,1,0,0,0,245,246,1,0,0,0,246,247,1,0,0,0,247,248,3,36,18,0,248, - 17,1,0,0,0,249,250,5,93,0,0,250,254,3,36,18,0,251,253,3,16,8,0,252,251, - 1,0,0,0,253,256,1,0,0,0,254,252,1,0,0,0,254,255,1,0,0,0,255,259,1,0,0, - 0,256,254,1,0,0,0,257,258,5,29,0,0,258,260,3,36,18,0,259,257,1,0,0,0, - 259,260,1,0,0,0,260,19,1,0,0,0,261,262,5,40,0,0,262,263,5,130,0,0,263, - 264,3,4,2,0,264,265,5,149,0,0,265,268,3,10,5,0,266,267,5,25,0,0,267,269, - 3,10,5,0,268,266,1,0,0,0,268,269,1,0,0,0,269,21,1,0,0,0,270,271,5,100, - 0,0,271,272,5,130,0,0,272,273,3,4,2,0,273,274,5,149,0,0,274,276,3,10, - 5,0,275,277,5,150,0,0,276,275,1,0,0,0,276,277,1,0,0,0,277,23,1,0,0,0, - 278,279,5,33,0,0,279,283,5,130,0,0,280,284,3,6,3,0,281,284,3,30,15,0, - 282,284,3,4,2,0,283,280,1,0,0,0,283,281,1,0,0,0,283,282,1,0,0,0,283,284, - 1,0,0,0,284,285,1,0,0,0,285,287,5,150,0,0,286,288,3,4,2,0,287,286,1,0, - 0,0,287,288,1,0,0,0,288,289,1,0,0,0,289,293,5,150,0,0,290,294,3,6,3,0, - 291,294,3,30,15,0,292,294,3,4,2,0,293,290,1,0,0,0,293,291,1,0,0,0,293, - 292,1,0,0,0,293,294,1,0,0,0,294,295,1,0,0,0,295,296,5,149,0,0,296,298, - 3,10,5,0,297,299,5,150,0,0,298,297,1,0,0,0,298,299,1,0,0,0,299,25,1,0, - 0,0,300,301,5,33,0,0,301,302,5,130,0,0,302,303,5,52,0,0,303,306,3,160, - 80,0,304,305,5,116,0,0,305,307,3,160,80,0,306,304,1,0,0,0,306,307,1,0, - 0,0,307,308,1,0,0,0,308,309,5,42,0,0,309,310,3,4,2,0,310,311,5,149,0, - 0,311,313,3,10,5,0,312,314,5,150,0,0,313,312,1,0,0,0,313,314,1,0,0,0, - 314,27,1,0,0,0,315,316,5,31,0,0,316,317,3,160,80,0,317,319,5,130,0,0, - 318,320,3,8,4,0,319,318,1,0,0,0,319,320,1,0,0,0,320,321,1,0,0,0,321,322, - 
5,149,0,0,322,323,3,36,18,0,323,29,1,0,0,0,324,325,3,4,2,0,325,326,5, - 115,0,0,326,327,5,122,0,0,327,328,3,4,2,0,328,31,1,0,0,0,329,331,3,4, - 2,0,330,332,5,150,0,0,331,330,1,0,0,0,331,332,1,0,0,0,332,33,1,0,0,0, - 333,334,5,150,0,0,334,35,1,0,0,0,335,339,5,128,0,0,336,338,3,2,1,0,337, - 336,1,0,0,0,338,341,1,0,0,0,339,337,1,0,0,0,339,340,1,0,0,0,340,342,1, - 0,0,0,341,339,1,0,0,0,342,343,5,147,0,0,343,37,1,0,0,0,344,345,3,4,2, - 0,345,346,5,115,0,0,346,347,3,4,2,0,347,39,1,0,0,0,348,353,3,38,19,0, - 349,350,5,116,0,0,350,352,3,38,19,0,351,349,1,0,0,0,352,355,1,0,0,0,353, - 351,1,0,0,0,353,354,1,0,0,0,354,357,1,0,0,0,355,353,1,0,0,0,356,358,5, - 116,0,0,357,356,1,0,0,0,357,358,1,0,0,0,358,41,1,0,0,0,359,363,3,44,22, - 0,360,363,3,48,24,0,361,363,3,124,62,0,362,359,1,0,0,0,362,360,1,0,0, - 0,362,361,1,0,0,0,363,364,1,0,0,0,364,365,5,0,0,1,365,43,1,0,0,0,366, - 372,3,46,23,0,367,368,5,95,0,0,368,369,5,1,0,0,369,371,3,46,23,0,370, - 367,1,0,0,0,371,374,1,0,0,0,372,370,1,0,0,0,372,373,1,0,0,0,373,45,1, - 0,0,0,374,372,1,0,0,0,375,382,3,48,24,0,376,377,5,130,0,0,377,378,3,44, - 22,0,378,379,5,149,0,0,379,382,1,0,0,0,380,382,3,164,82,0,381,375,1,0, - 0,0,381,376,1,0,0,0,381,380,1,0,0,0,382,47,1,0,0,0,383,385,3,50,25,0, - 384,383,1,0,0,0,384,385,1,0,0,0,385,386,1,0,0,0,386,388,5,79,0,0,387, - 389,5,24,0,0,388,387,1,0,0,0,388,389,1,0,0,0,389,391,1,0,0,0,390,392, - 3,52,26,0,391,390,1,0,0,0,391,392,1,0,0,0,392,393,1,0,0,0,393,395,3,114, - 57,0,394,396,3,54,27,0,395,394,1,0,0,0,395,396,1,0,0,0,396,398,1,0,0, - 0,397,399,3,56,28,0,398,397,1,0,0,0,398,399,1,0,0,0,399,401,1,0,0,0,400, - 402,3,60,30,0,401,400,1,0,0,0,401,402,1,0,0,0,402,404,1,0,0,0,403,405, - 3,62,31,0,404,403,1,0,0,0,404,405,1,0,0,0,405,407,1,0,0,0,406,408,3,64, - 32,0,407,406,1,0,0,0,407,408,1,0,0,0,408,411,1,0,0,0,409,410,5,102,0, - 0,410,412,7,0,0,0,411,409,1,0,0,0,411,412,1,0,0,0,412,415,1,0,0,0,413, - 414,5,102,0,0,414,416,5,89,0,0,415,413,1,0,0,0,415,416,1,0,0,0,416,418, - 1,0,0,0,417,419,3,66,33,0,418,417,1,0,0,0,418,419,1,0,0,0,419,421,1,0, - 0,0,420,422,3,58,29,0,421,420,1,0,0,0,421,422,1,0,0,0,422,424,1,0,0,0, - 423,425,3,68,34,0,424,423,1,0,0,0,424,425,1,0,0,0,425,428,1,0,0,0,426, - 429,3,72,36,0,427,429,3,74,37,0,428,426,1,0,0,0,428,427,1,0,0,0,428,429, - 1,0,0,0,429,431,1,0,0,0,430,432,3,76,38,0,431,430,1,0,0,0,431,432,1,0, - 0,0,432,49,1,0,0,0,433,434,5,102,0,0,434,435,3,128,64,0,435,51,1,0,0, - 0,436,437,5,88,0,0,437,440,5,108,0,0,438,439,5,102,0,0,439,441,5,85,0, - 0,440,438,1,0,0,0,440,441,1,0,0,0,441,53,1,0,0,0,442,443,5,34,0,0,443, - 444,3,78,39,0,444,55,1,0,0,0,445,447,7,1,0,0,446,445,1,0,0,0,446,447, - 1,0,0,0,447,448,1,0,0,0,448,449,5,5,0,0,449,450,5,47,0,0,450,451,3,114, - 57,0,451,57,1,0,0,0,452,453,5,101,0,0,453,454,3,160,80,0,454,455,5,6, - 0,0,455,456,5,130,0,0,456,457,3,98,49,0,457,467,5,149,0,0,458,459,5,116, - 0,0,459,460,3,160,80,0,460,461,5,6,0,0,461,462,5,130,0,0,462,463,3,98, - 49,0,463,464,5,149,0,0,464,466,1,0,0,0,465,458,1,0,0,0,466,469,1,0,0, - 0,467,465,1,0,0,0,467,468,1,0,0,0,468,59,1,0,0,0,469,467,1,0,0,0,470, - 471,5,69,0,0,471,472,3,116,58,0,472,61,1,0,0,0,473,474,5,99,0,0,474,475, - 3,116,58,0,475,63,1,0,0,0,476,477,5,36,0,0,477,484,5,11,0,0,478,479,7, - 0,0,0,479,480,5,130,0,0,480,481,3,114,57,0,481,482,5,149,0,0,482,485, - 1,0,0,0,483,485,3,114,57,0,484,478,1,0,0,0,484,483,1,0,0,0,485,65,1,0, - 0,0,486,487,5,37,0,0,487,488,3,116,58,0,488,67,1,0,0,0,489,490,5,64,0, - 0,490,491,5,11,0,0,491,492,3,88,44,0,492,69,1,0,0,0,493,494,5,64,0,0, - 
494,495,5,11,0,0,495,496,3,114,57,0,496,71,1,0,0,0,497,498,5,54,0,0,498, - 501,3,116,58,0,499,500,5,116,0,0,500,502,3,116,58,0,501,499,1,0,0,0,501, - 502,1,0,0,0,502,507,1,0,0,0,503,504,5,102,0,0,504,508,5,85,0,0,505,506, - 5,11,0,0,506,508,3,114,57,0,507,503,1,0,0,0,507,505,1,0,0,0,507,508,1, - 0,0,0,508,527,1,0,0,0,509,510,5,54,0,0,510,513,3,116,58,0,511,512,5,102, - 0,0,512,514,5,85,0,0,513,511,1,0,0,0,513,514,1,0,0,0,514,515,1,0,0,0, - 515,516,5,61,0,0,516,517,3,116,58,0,517,527,1,0,0,0,518,519,5,54,0,0, - 519,520,3,116,58,0,520,521,5,61,0,0,521,524,3,116,58,0,522,523,5,11,0, - 0,523,525,3,114,57,0,524,522,1,0,0,0,524,525,1,0,0,0,525,527,1,0,0,0, - 526,497,1,0,0,0,526,509,1,0,0,0,526,518,1,0,0,0,527,73,1,0,0,0,528,529, - 5,61,0,0,529,530,3,116,58,0,530,75,1,0,0,0,531,532,5,81,0,0,532,533,3, - 94,47,0,533,77,1,0,0,0,534,535,6,39,-1,0,535,537,3,136,68,0,536,538,5, - 28,0,0,537,536,1,0,0,0,537,538,1,0,0,0,538,540,1,0,0,0,539,541,3,86,43, - 0,540,539,1,0,0,0,540,541,1,0,0,0,541,547,1,0,0,0,542,543,5,130,0,0,543, - 544,3,78,39,0,544,545,5,149,0,0,545,547,1,0,0,0,546,534,1,0,0,0,546,542, - 1,0,0,0,547,562,1,0,0,0,548,549,10,3,0,0,549,550,3,82,41,0,550,551,3, - 78,39,4,551,561,1,0,0,0,552,554,10,4,0,0,553,555,3,80,40,0,554,553,1, - 0,0,0,554,555,1,0,0,0,555,556,1,0,0,0,556,557,5,47,0,0,557,558,3,78,39, - 0,558,559,3,84,42,0,559,561,1,0,0,0,560,548,1,0,0,0,560,552,1,0,0,0,561, - 564,1,0,0,0,562,560,1,0,0,0,562,563,1,0,0,0,563,79,1,0,0,0,564,562,1, - 0,0,0,565,567,7,2,0,0,566,565,1,0,0,0,566,567,1,0,0,0,567,568,1,0,0,0, - 568,575,5,44,0,0,569,571,5,44,0,0,570,572,7,2,0,0,571,570,1,0,0,0,571, - 572,1,0,0,0,572,575,1,0,0,0,573,575,7,2,0,0,574,566,1,0,0,0,574,569,1, - 0,0,0,574,573,1,0,0,0,575,609,1,0,0,0,576,578,7,3,0,0,577,576,1,0,0,0, - 577,578,1,0,0,0,578,579,1,0,0,0,579,581,7,4,0,0,580,582,5,65,0,0,581, - 580,1,0,0,0,581,582,1,0,0,0,582,591,1,0,0,0,583,585,7,4,0,0,584,586,5, - 65,0,0,585,584,1,0,0,0,585,586,1,0,0,0,586,588,1,0,0,0,587,589,7,3,0, - 0,588,587,1,0,0,0,588,589,1,0,0,0,589,591,1,0,0,0,590,577,1,0,0,0,590, - 583,1,0,0,0,591,609,1,0,0,0,592,594,7,5,0,0,593,592,1,0,0,0,593,594,1, - 0,0,0,594,595,1,0,0,0,595,597,5,35,0,0,596,598,5,65,0,0,597,596,1,0,0, - 0,597,598,1,0,0,0,598,607,1,0,0,0,599,601,5,35,0,0,600,602,5,65,0,0,601, - 600,1,0,0,0,601,602,1,0,0,0,602,604,1,0,0,0,603,605,7,5,0,0,604,603,1, - 0,0,0,604,605,1,0,0,0,605,607,1,0,0,0,606,593,1,0,0,0,606,599,1,0,0,0, - 607,609,1,0,0,0,608,574,1,0,0,0,608,590,1,0,0,0,608,606,1,0,0,0,609,81, - 1,0,0,0,610,611,5,17,0,0,611,614,5,47,0,0,612,614,5,116,0,0,613,610,1, - 0,0,0,613,612,1,0,0,0,614,83,1,0,0,0,615,616,5,62,0,0,616,625,3,114,57, - 0,617,618,5,96,0,0,618,619,5,130,0,0,619,620,3,114,57,0,620,621,5,149, - 0,0,621,625,1,0,0,0,622,623,5,96,0,0,623,625,3,114,57,0,624,615,1,0,0, - 0,624,617,1,0,0,0,624,622,1,0,0,0,625,85,1,0,0,0,626,627,5,77,0,0,627, - 630,3,92,46,0,628,629,5,61,0,0,629,631,3,92,46,0,630,628,1,0,0,0,630, - 631,1,0,0,0,631,87,1,0,0,0,632,637,3,90,45,0,633,634,5,116,0,0,634,636, - 3,90,45,0,635,633,1,0,0,0,636,639,1,0,0,0,637,635,1,0,0,0,637,638,1,0, - 0,0,638,89,1,0,0,0,639,637,1,0,0,0,640,642,3,116,58,0,641,643,7,6,0,0, - 642,641,1,0,0,0,642,643,1,0,0,0,643,646,1,0,0,0,644,645,5,60,0,0,645, - 647,7,7,0,0,646,644,1,0,0,0,646,647,1,0,0,0,647,650,1,0,0,0,648,649,5, - 16,0,0,649,651,5,110,0,0,650,648,1,0,0,0,650,651,1,0,0,0,651,91,1,0,0, - 0,652,659,3,164,82,0,653,656,3,148,74,0,654,655,5,151,0,0,655,657,3,148, - 74,0,656,654,1,0,0,0,656,657,1,0,0,0,657,659,1,0,0,0,658,652,1,0,0,0, - 
658,653,1,0,0,0,659,93,1,0,0,0,660,665,3,96,48,0,661,662,5,116,0,0,662, - 664,3,96,48,0,663,661,1,0,0,0,664,667,1,0,0,0,665,663,1,0,0,0,665,666, - 1,0,0,0,666,95,1,0,0,0,667,665,1,0,0,0,668,669,3,160,80,0,669,670,5,122, - 0,0,670,671,3,150,75,0,671,97,1,0,0,0,672,674,3,100,50,0,673,672,1,0, - 0,0,673,674,1,0,0,0,674,676,1,0,0,0,675,677,3,102,51,0,676,675,1,0,0, - 0,676,677,1,0,0,0,677,679,1,0,0,0,678,680,3,104,52,0,679,678,1,0,0,0, - 679,680,1,0,0,0,680,99,1,0,0,0,681,682,5,67,0,0,682,683,5,11,0,0,683, - 684,3,114,57,0,684,101,1,0,0,0,685,686,5,64,0,0,686,687,5,11,0,0,687, - 688,3,88,44,0,688,103,1,0,0,0,689,690,7,8,0,0,690,691,3,106,53,0,691, - 105,1,0,0,0,692,699,3,108,54,0,693,694,5,9,0,0,694,695,3,108,54,0,695, - 696,5,2,0,0,696,697,3,108,54,0,697,699,1,0,0,0,698,692,1,0,0,0,698,693, - 1,0,0,0,699,107,1,0,0,0,700,701,5,19,0,0,701,713,5,75,0,0,702,703,5,94, - 0,0,703,713,5,68,0,0,704,705,5,94,0,0,705,713,5,32,0,0,706,707,3,148, - 74,0,707,708,5,68,0,0,708,713,1,0,0,0,709,710,3,148,74,0,710,711,5,32, - 0,0,711,713,1,0,0,0,712,700,1,0,0,0,712,702,1,0,0,0,712,704,1,0,0,0,712, - 706,1,0,0,0,712,709,1,0,0,0,713,109,1,0,0,0,714,715,3,116,58,0,715,716, - 5,0,0,1,716,111,1,0,0,0,717,774,3,160,80,0,718,719,3,160,80,0,719,720, - 5,130,0,0,720,721,3,160,80,0,721,728,3,112,56,0,722,723,5,116,0,0,723, - 724,3,160,80,0,724,725,3,112,56,0,725,727,1,0,0,0,726,722,1,0,0,0,727, - 730,1,0,0,0,728,726,1,0,0,0,728,729,1,0,0,0,729,732,1,0,0,0,730,728,1, - 0,0,0,731,733,5,116,0,0,732,731,1,0,0,0,732,733,1,0,0,0,733,734,1,0,0, - 0,734,735,5,149,0,0,735,774,1,0,0,0,736,737,3,160,80,0,737,738,5,130, - 0,0,738,743,3,162,81,0,739,740,5,116,0,0,740,742,3,162,81,0,741,739,1, - 0,0,0,742,745,1,0,0,0,743,741,1,0,0,0,743,744,1,0,0,0,744,747,1,0,0,0, - 745,743,1,0,0,0,746,748,5,116,0,0,747,746,1,0,0,0,747,748,1,0,0,0,748, - 749,1,0,0,0,749,750,5,149,0,0,750,774,1,0,0,0,751,752,3,160,80,0,752, - 753,5,130,0,0,753,758,3,112,56,0,754,755,5,116,0,0,755,757,3,112,56,0, - 756,754,1,0,0,0,757,760,1,0,0,0,758,756,1,0,0,0,758,759,1,0,0,0,759,762, - 1,0,0,0,760,758,1,0,0,0,761,763,5,116,0,0,762,761,1,0,0,0,762,763,1,0, - 0,0,763,764,1,0,0,0,764,765,5,149,0,0,765,774,1,0,0,0,766,767,3,160,80, - 0,767,769,5,130,0,0,768,770,3,114,57,0,769,768,1,0,0,0,769,770,1,0,0, - 0,770,771,1,0,0,0,771,772,5,149,0,0,772,774,1,0,0,0,773,717,1,0,0,0,773, - 718,1,0,0,0,773,736,1,0,0,0,773,751,1,0,0,0,773,766,1,0,0,0,774,113,1, - 0,0,0,775,780,3,116,58,0,776,777,5,116,0,0,777,779,3,116,58,0,778,776, - 1,0,0,0,779,782,1,0,0,0,780,778,1,0,0,0,780,781,1,0,0,0,781,784,1,0,0, - 0,782,780,1,0,0,0,783,785,5,116,0,0,784,783,1,0,0,0,784,785,1,0,0,0,785, - 115,1,0,0,0,786,787,6,58,-1,0,787,789,5,12,0,0,788,790,3,116,58,0,789, - 788,1,0,0,0,789,790,1,0,0,0,790,796,1,0,0,0,791,792,5,98,0,0,792,793, - 3,116,58,0,793,794,5,83,0,0,794,795,3,116,58,0,795,797,1,0,0,0,796,791, - 1,0,0,0,797,798,1,0,0,0,798,796,1,0,0,0,798,799,1,0,0,0,799,802,1,0,0, - 0,800,801,5,25,0,0,801,803,3,116,58,0,802,800,1,0,0,0,802,803,1,0,0,0, - 803,804,1,0,0,0,804,805,5,26,0,0,805,936,1,0,0,0,806,807,5,13,0,0,807, - 808,5,130,0,0,808,809,3,116,58,0,809,810,5,6,0,0,810,811,3,112,56,0,811, - 812,5,149,0,0,812,936,1,0,0,0,813,814,5,20,0,0,814,936,5,110,0,0,815, - 816,5,45,0,0,816,817,3,116,58,0,817,818,3,152,76,0,818,936,1,0,0,0,819, - 820,5,82,0,0,820,821,5,130,0,0,821,822,3,116,58,0,822,823,5,34,0,0,823, - 826,3,116,58,0,824,825,5,33,0,0,825,827,3,116,58,0,826,824,1,0,0,0,826, - 827,1,0,0,0,827,828,1,0,0,0,828,829,5,149,0,0,829,936,1,0,0,0,830,831, - 
5,86,0,0,831,936,5,110,0,0,832,833,5,91,0,0,833,834,5,130,0,0,834,835, - 7,9,0,0,835,836,3,166,83,0,836,837,5,34,0,0,837,838,3,116,58,0,838,839, - 5,149,0,0,839,936,1,0,0,0,840,841,3,160,80,0,841,843,5,130,0,0,842,844, - 3,114,57,0,843,842,1,0,0,0,843,844,1,0,0,0,844,845,1,0,0,0,845,846,5, - 149,0,0,846,855,1,0,0,0,847,849,5,130,0,0,848,850,5,24,0,0,849,848,1, - 0,0,0,849,850,1,0,0,0,850,852,1,0,0,0,851,853,3,118,59,0,852,851,1,0, - 0,0,852,853,1,0,0,0,853,854,1,0,0,0,854,856,5,149,0,0,855,847,1,0,0,0, - 855,856,1,0,0,0,856,857,1,0,0,0,857,858,5,66,0,0,858,859,5,130,0,0,859, - 860,3,98,49,0,860,861,5,149,0,0,861,936,1,0,0,0,862,863,3,160,80,0,863, - 865,5,130,0,0,864,866,3,114,57,0,865,864,1,0,0,0,865,866,1,0,0,0,866, - 867,1,0,0,0,867,868,5,149,0,0,868,877,1,0,0,0,869,871,5,130,0,0,870,872, - 5,24,0,0,871,870,1,0,0,0,871,872,1,0,0,0,872,874,1,0,0,0,873,875,3,118, - 59,0,874,873,1,0,0,0,874,875,1,0,0,0,875,876,1,0,0,0,876,878,5,149,0, - 0,877,869,1,0,0,0,877,878,1,0,0,0,878,879,1,0,0,0,879,880,5,66,0,0,880, - 881,3,160,80,0,881,936,1,0,0,0,882,888,3,160,80,0,883,885,5,130,0,0,884, - 886,3,114,57,0,885,884,1,0,0,0,885,886,1,0,0,0,886,887,1,0,0,0,887,889, - 5,149,0,0,888,883,1,0,0,0,888,889,1,0,0,0,889,890,1,0,0,0,890,892,5,130, - 0,0,891,893,5,24,0,0,892,891,1,0,0,0,892,893,1,0,0,0,893,895,1,0,0,0, - 894,896,3,118,59,0,895,894,1,0,0,0,895,896,1,0,0,0,896,897,1,0,0,0,897, - 898,5,149,0,0,898,936,1,0,0,0,899,936,3,124,62,0,900,936,3,168,84,0,901, - 936,3,150,75,0,902,903,5,118,0,0,903,936,3,116,58,19,904,905,5,58,0,0, - 905,936,3,116,58,13,906,907,3,140,70,0,907,908,5,120,0,0,908,910,1,0, - 0,0,909,906,1,0,0,0,909,910,1,0,0,0,910,911,1,0,0,0,911,936,5,112,0,0, - 912,913,5,130,0,0,913,914,3,44,22,0,914,915,5,149,0,0,915,936,1,0,0,0, - 916,917,5,130,0,0,917,918,3,116,58,0,918,919,5,149,0,0,919,936,1,0,0, - 0,920,921,5,130,0,0,921,922,3,114,57,0,922,923,5,149,0,0,923,936,1,0, - 0,0,924,926,5,129,0,0,925,927,3,114,57,0,926,925,1,0,0,0,926,927,1,0, - 0,0,927,928,1,0,0,0,928,936,5,148,0,0,929,931,5,128,0,0,930,932,3,40, - 20,0,931,930,1,0,0,0,931,932,1,0,0,0,932,933,1,0,0,0,933,936,5,147,0, - 0,934,936,3,132,66,0,935,786,1,0,0,0,935,806,1,0,0,0,935,813,1,0,0,0, - 935,815,1,0,0,0,935,819,1,0,0,0,935,830,1,0,0,0,935,832,1,0,0,0,935,840, - 1,0,0,0,935,862,1,0,0,0,935,882,1,0,0,0,935,899,1,0,0,0,935,900,1,0,0, - 0,935,901,1,0,0,0,935,902,1,0,0,0,935,904,1,0,0,0,935,909,1,0,0,0,935, - 912,1,0,0,0,935,916,1,0,0,0,935,920,1,0,0,0,935,924,1,0,0,0,935,929,1, - 0,0,0,935,934,1,0,0,0,936,1041,1,0,0,0,937,941,10,18,0,0,938,942,5,112, - 0,0,939,942,5,151,0,0,940,942,5,138,0,0,941,938,1,0,0,0,941,939,1,0,0, - 0,941,940,1,0,0,0,942,943,1,0,0,0,943,1040,3,116,58,19,944,948,10,17, - 0,0,945,949,5,139,0,0,946,949,5,118,0,0,947,949,5,117,0,0,948,945,1,0, - 0,0,948,946,1,0,0,0,948,947,1,0,0,0,949,950,1,0,0,0,950,1040,3,116,58, - 18,951,976,10,16,0,0,952,977,5,121,0,0,953,977,5,122,0,0,954,977,5,133, - 0,0,955,977,5,131,0,0,956,977,5,132,0,0,957,977,5,123,0,0,958,977,5,124, - 0,0,959,961,5,58,0,0,960,959,1,0,0,0,960,961,1,0,0,0,961,962,1,0,0,0, - 962,964,5,42,0,0,963,965,5,15,0,0,964,963,1,0,0,0,964,965,1,0,0,0,965, - 977,1,0,0,0,966,968,5,58,0,0,967,966,1,0,0,0,967,968,1,0,0,0,968,969, - 1,0,0,0,969,977,7,10,0,0,970,977,5,145,0,0,971,977,5,146,0,0,972,977, - 5,135,0,0,973,977,5,126,0,0,974,977,5,127,0,0,975,977,5,134,0,0,976,952, - 1,0,0,0,976,953,1,0,0,0,976,954,1,0,0,0,976,955,1,0,0,0,976,956,1,0,0, - 0,976,957,1,0,0,0,976,958,1,0,0,0,976,960,1,0,0,0,976,967,1,0,0,0,976, - 
970,1,0,0,0,976,971,1,0,0,0,976,972,1,0,0,0,976,973,1,0,0,0,976,974,1, - 0,0,0,976,975,1,0,0,0,977,978,1,0,0,0,978,1040,3,116,58,17,979,980,10, - 14,0,0,980,981,5,137,0,0,981,1040,3,116,58,15,982,983,10,12,0,0,983,984, - 5,2,0,0,984,1040,3,116,58,13,985,986,10,11,0,0,986,987,5,63,0,0,987,1040, - 3,116,58,12,988,990,10,10,0,0,989,991,5,58,0,0,990,989,1,0,0,0,990,991, - 1,0,0,0,991,992,1,0,0,0,992,993,5,9,0,0,993,994,3,116,58,0,994,995,5, - 2,0,0,995,996,3,116,58,11,996,1040,1,0,0,0,997,998,10,9,0,0,998,999,5, - 140,0,0,999,1000,3,116,58,0,1000,1001,5,115,0,0,1001,1002,3,116,58,9, - 1002,1040,1,0,0,0,1003,1004,10,25,0,0,1004,1005,5,129,0,0,1005,1006,3, - 116,58,0,1006,1007,5,148,0,0,1007,1040,1,0,0,0,1008,1009,10,24,0,0,1009, - 1010,5,120,0,0,1010,1040,5,108,0,0,1011,1012,10,23,0,0,1012,1013,5,120, - 0,0,1013,1040,3,160,80,0,1014,1015,10,22,0,0,1015,1016,5,136,0,0,1016, - 1017,5,129,0,0,1017,1018,3,116,58,0,1018,1019,5,148,0,0,1019,1040,1,0, - 0,0,1020,1021,10,21,0,0,1021,1022,5,136,0,0,1022,1040,5,108,0,0,1023, - 1024,10,20,0,0,1024,1025,5,136,0,0,1025,1040,3,160,80,0,1026,1027,10, - 15,0,0,1027,1029,5,46,0,0,1028,1030,5,58,0,0,1029,1028,1,0,0,0,1029,1030, - 1,0,0,0,1030,1031,1,0,0,0,1031,1040,5,59,0,0,1032,1037,10,8,0,0,1033, - 1034,5,6,0,0,1034,1038,3,160,80,0,1035,1036,5,6,0,0,1036,1038,5,110,0, - 0,1037,1033,1,0,0,0,1037,1035,1,0,0,0,1038,1040,1,0,0,0,1039,937,1,0, - 0,0,1039,944,1,0,0,0,1039,951,1,0,0,0,1039,979,1,0,0,0,1039,982,1,0,0, - 0,1039,985,1,0,0,0,1039,988,1,0,0,0,1039,997,1,0,0,0,1039,1003,1,0,0, - 0,1039,1008,1,0,0,0,1039,1011,1,0,0,0,1039,1014,1,0,0,0,1039,1020,1,0, - 0,0,1039,1023,1,0,0,0,1039,1026,1,0,0,0,1039,1032,1,0,0,0,1040,1043,1, - 0,0,0,1041,1039,1,0,0,0,1041,1042,1,0,0,0,1042,117,1,0,0,0,1043,1041, - 1,0,0,0,1044,1049,3,120,60,0,1045,1046,5,116,0,0,1046,1048,3,120,60,0, - 1047,1045,1,0,0,0,1048,1051,1,0,0,0,1049,1047,1,0,0,0,1049,1050,1,0,0, - 0,1050,1053,1,0,0,0,1051,1049,1,0,0,0,1052,1054,5,116,0,0,1053,1052,1, - 0,0,0,1053,1054,1,0,0,0,1054,119,1,0,0,0,1055,1058,3,122,61,0,1056,1058, - 3,116,58,0,1057,1055,1,0,0,0,1057,1056,1,0,0,0,1058,121,1,0,0,0,1059, - 1060,5,130,0,0,1060,1065,3,160,80,0,1061,1062,5,116,0,0,1062,1064,3,160, - 80,0,1063,1061,1,0,0,0,1064,1067,1,0,0,0,1065,1063,1,0,0,0,1065,1066, - 1,0,0,0,1066,1069,1,0,0,0,1067,1065,1,0,0,0,1068,1070,5,116,0,0,1069, - 1068,1,0,0,0,1069,1070,1,0,0,0,1070,1071,1,0,0,0,1071,1072,5,149,0,0, - 1072,1085,1,0,0,0,1073,1078,3,160,80,0,1074,1075,5,116,0,0,1075,1077, - 3,160,80,0,1076,1074,1,0,0,0,1077,1080,1,0,0,0,1078,1076,1,0,0,0,1078, - 1079,1,0,0,0,1079,1082,1,0,0,0,1080,1078,1,0,0,0,1081,1083,5,116,0,0, - 1082,1081,1,0,0,0,1082,1083,1,0,0,0,1083,1085,1,0,0,0,1084,1059,1,0,0, - 0,1084,1073,1,0,0,0,1085,1086,1,0,0,0,1086,1087,5,111,0,0,1087,1088,3, - 116,58,0,1088,123,1,0,0,0,1089,1090,5,132,0,0,1090,1094,3,160,80,0,1091, - 1093,3,126,63,0,1092,1091,1,0,0,0,1093,1096,1,0,0,0,1094,1092,1,0,0,0, - 1094,1095,1,0,0,0,1095,1097,1,0,0,0,1096,1094,1,0,0,0,1097,1098,5,151, - 0,0,1098,1099,5,124,0,0,1099,1122,1,0,0,0,1100,1101,5,132,0,0,1101,1105, - 3,160,80,0,1102,1104,3,126,63,0,1103,1102,1,0,0,0,1104,1107,1,0,0,0,1105, - 1103,1,0,0,0,1105,1106,1,0,0,0,1106,1108,1,0,0,0,1107,1105,1,0,0,0,1108, - 1114,5,124,0,0,1109,1115,3,124,62,0,1110,1111,5,128,0,0,1111,1112,3,116, - 58,0,1112,1113,5,147,0,0,1113,1115,1,0,0,0,1114,1109,1,0,0,0,1114,1110, - 1,0,0,0,1114,1115,1,0,0,0,1115,1116,1,0,0,0,1116,1117,5,132,0,0,1117, - 1118,5,151,0,0,1118,1119,3,160,80,0,1119,1120,5,124,0,0,1120,1122,1,0, - 
0,0,1121,1089,1,0,0,0,1121,1100,1,0,0,0,1122,125,1,0,0,0,1123,1124,3, - 160,80,0,1124,1125,5,122,0,0,1125,1126,3,166,83,0,1126,1135,1,0,0,0,1127, - 1128,3,160,80,0,1128,1129,5,122,0,0,1129,1130,5,128,0,0,1130,1131,3,116, - 58,0,1131,1132,5,147,0,0,1132,1135,1,0,0,0,1133,1135,3,160,80,0,1134, - 1123,1,0,0,0,1134,1127,1,0,0,0,1134,1133,1,0,0,0,1135,127,1,0,0,0,1136, - 1141,3,130,65,0,1137,1138,5,116,0,0,1138,1140,3,130,65,0,1139,1137,1, - 0,0,0,1140,1143,1,0,0,0,1141,1139,1,0,0,0,1141,1142,1,0,0,0,1142,1145, - 1,0,0,0,1143,1141,1,0,0,0,1144,1146,5,116,0,0,1145,1144,1,0,0,0,1145, - 1146,1,0,0,0,1146,129,1,0,0,0,1147,1148,3,160,80,0,1148,1149,5,6,0,0, - 1149,1150,5,130,0,0,1150,1151,3,44,22,0,1151,1152,5,149,0,0,1152,1158, - 1,0,0,0,1153,1154,3,116,58,0,1154,1155,5,6,0,0,1155,1156,3,160,80,0,1156, - 1158,1,0,0,0,1157,1147,1,0,0,0,1157,1153,1,0,0,0,1158,131,1,0,0,0,1159, - 1167,3,164,82,0,1160,1161,3,140,70,0,1161,1162,5,120,0,0,1162,1164,1, - 0,0,0,1163,1160,1,0,0,0,1163,1164,1,0,0,0,1164,1165,1,0,0,0,1165,1167, - 3,134,67,0,1166,1159,1,0,0,0,1166,1163,1,0,0,0,1167,133,1,0,0,0,1168, - 1173,3,160,80,0,1169,1170,5,120,0,0,1170,1172,3,160,80,0,1171,1169,1, - 0,0,0,1172,1175,1,0,0,0,1173,1171,1,0,0,0,1173,1174,1,0,0,0,1174,135, - 1,0,0,0,1175,1173,1,0,0,0,1176,1177,6,68,-1,0,1177,1186,3,140,70,0,1178, - 1186,3,138,69,0,1179,1180,5,130,0,0,1180,1181,3,44,22,0,1181,1182,5,149, - 0,0,1182,1186,1,0,0,0,1183,1186,3,124,62,0,1184,1186,3,164,82,0,1185, - 1176,1,0,0,0,1185,1178,1,0,0,0,1185,1179,1,0,0,0,1185,1183,1,0,0,0,1185, - 1184,1,0,0,0,1186,1195,1,0,0,0,1187,1191,10,3,0,0,1188,1192,3,158,79, - 0,1189,1190,5,6,0,0,1190,1192,3,160,80,0,1191,1188,1,0,0,0,1191,1189, - 1,0,0,0,1192,1194,1,0,0,0,1193,1187,1,0,0,0,1194,1197,1,0,0,0,1195,1193, - 1,0,0,0,1195,1196,1,0,0,0,1196,137,1,0,0,0,1197,1195,1,0,0,0,1198,1199, - 3,160,80,0,1199,1201,5,130,0,0,1200,1202,3,142,71,0,1201,1200,1,0,0,0, - 1201,1202,1,0,0,0,1202,1203,1,0,0,0,1203,1204,5,149,0,0,1204,139,1,0, - 0,0,1205,1206,3,144,72,0,1206,1207,5,120,0,0,1207,1209,1,0,0,0,1208,1205, - 1,0,0,0,1208,1209,1,0,0,0,1209,1210,1,0,0,0,1210,1211,3,160,80,0,1211, - 141,1,0,0,0,1212,1217,3,116,58,0,1213,1214,5,116,0,0,1214,1216,3,116, - 58,0,1215,1213,1,0,0,0,1216,1219,1,0,0,0,1217,1215,1,0,0,0,1217,1218, - 1,0,0,0,1218,1221,1,0,0,0,1219,1217,1,0,0,0,1220,1222,5,116,0,0,1221, - 1220,1,0,0,0,1221,1222,1,0,0,0,1222,143,1,0,0,0,1223,1224,3,160,80,0, - 1224,145,1,0,0,0,1225,1234,5,106,0,0,1226,1227,5,120,0,0,1227,1234,7, - 11,0,0,1228,1229,5,108,0,0,1229,1231,5,120,0,0,1230,1232,7,11,0,0,1231, - 1230,1,0,0,0,1231,1232,1,0,0,0,1232,1234,1,0,0,0,1233,1225,1,0,0,0,1233, - 1226,1,0,0,0,1233,1228,1,0,0,0,1234,147,1,0,0,0,1235,1237,7,12,0,0,1236, - 1235,1,0,0,0,1236,1237,1,0,0,0,1237,1244,1,0,0,0,1238,1245,3,146,73,0, - 1239,1245,5,107,0,0,1240,1245,5,108,0,0,1241,1245,5,109,0,0,1242,1245, - 5,43,0,0,1243,1245,5,57,0,0,1244,1238,1,0,0,0,1244,1239,1,0,0,0,1244, - 1240,1,0,0,0,1244,1241,1,0,0,0,1244,1242,1,0,0,0,1244,1243,1,0,0,0,1245, - 149,1,0,0,0,1246,1250,3,148,74,0,1247,1250,5,110,0,0,1248,1250,5,59,0, - 0,1249,1246,1,0,0,0,1249,1247,1,0,0,0,1249,1248,1,0,0,0,1250,151,1,0, - 0,0,1251,1252,7,13,0,0,1252,153,1,0,0,0,1253,1254,7,14,0,0,1254,155,1, - 0,0,0,1255,1256,7,15,0,0,1256,157,1,0,0,0,1257,1260,5,105,0,0,1258,1260, - 3,156,78,0,1259,1257,1,0,0,0,1259,1258,1,0,0,0,1260,159,1,0,0,0,1261, - 1265,5,105,0,0,1262,1265,3,152,76,0,1263,1265,3,154,77,0,1264,1261,1, - 0,0,0,1264,1262,1,0,0,0,1264,1263,1,0,0,0,1265,161,1,0,0,0,1266,1267, - 
3,166,83,0,1267,1268,5,122,0,0,1268,1269,3,148,74,0,1269,163,1,0,0,0, - 1270,1271,5,128,0,0,1271,1272,3,134,67,0,1272,1273,5,147,0,0,1273,165, - 1,0,0,0,1274,1277,5,110,0,0,1275,1277,3,168,84,0,1276,1274,1,0,0,0,1276, - 1275,1,0,0,0,1277,167,1,0,0,0,1278,1282,5,142,0,0,1279,1281,3,170,85, - 0,1280,1279,1,0,0,0,1281,1284,1,0,0,0,1282,1280,1,0,0,0,1282,1283,1,0, - 0,0,1283,1285,1,0,0,0,1284,1282,1,0,0,0,1285,1286,5,144,0,0,1286,169, - 1,0,0,0,1287,1288,5,157,0,0,1288,1289,3,116,58,0,1289,1290,5,147,0,0, - 1290,1293,1,0,0,0,1291,1293,5,156,0,0,1292,1287,1,0,0,0,1292,1291,1,0, - 0,0,1293,171,1,0,0,0,1294,1298,5,143,0,0,1295,1297,3,174,87,0,1296,1295, - 1,0,0,0,1297,1300,1,0,0,0,1298,1296,1,0,0,0,1298,1299,1,0,0,0,1299,1301, - 1,0,0,0,1300,1298,1,0,0,0,1301,1302,5,0,0,1,1302,173,1,0,0,0,1303,1304, - 5,159,0,0,1304,1305,3,116,58,0,1305,1306,5,147,0,0,1306,1309,1,0,0,0, - 1307,1309,5,158,0,0,1308,1303,1,0,0,0,1308,1307,1,0,0,0,1309,175,1,0, - 0,0,168,179,186,195,202,206,220,224,227,231,234,241,245,254,259,268,276, - 283,287,293,298,306,313,319,331,339,353,357,362,372,381,384,388,391,395, - 398,401,404,407,411,415,418,421,424,428,431,440,446,467,484,501,507,513, - 524,526,537,540,546,554,560,562,566,571,574,577,581,585,588,590,593,597, - 601,604,606,608,613,624,630,637,642,646,650,656,658,665,673,676,679,698, - 712,728,732,743,747,758,762,769,773,780,784,789,798,802,826,843,849,852, - 855,865,871,874,877,885,888,892,895,909,926,931,935,941,948,960,964,967, - 976,990,1029,1037,1039,1041,1049,1053,1057,1065,1069,1078,1082,1084,1094, - 1105,1114,1121,1134,1141,1145,1157,1163,1166,1173,1185,1191,1195,1201, - 1208,1217,1221,1231,1233,1236,1244,1249,1259,1264,1276,1282,1292,1298, - 1308 + 1,58,3,58,1033,8,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,1041,8,58,5,58, + 1043,8,58,10,58,12,58,1046,9,58,1,59,1,59,1,59,1,59,5,59,1052,8,59,10, + 59,12,59,1055,9,59,1,59,3,59,1058,8,59,1,59,1,59,1,59,1,59,1,59,5,59, + 1065,8,59,10,59,12,59,1068,9,59,1,59,3,59,1071,8,59,1,59,1,59,3,59,1075, + 8,59,1,59,1,59,1,59,3,59,1080,8,59,1,60,1,60,1,60,5,60,1085,8,60,10,60, + 12,60,1088,9,60,1,60,1,60,1,60,1,60,1,60,1,60,5,60,1096,8,60,10,60,12, + 60,1099,9,60,1,60,1,60,1,60,1,60,1,60,1,60,3,60,1107,8,60,1,60,1,60,1, + 60,1,60,1,60,3,60,1114,8,60,1,61,1,61,1,61,1,61,1,61,1,61,1,61,1,61,1, + 61,1,61,1,61,3,61,1127,8,61,1,62,1,62,1,62,5,62,1132,8,62,10,62,12,62, + 1135,9,62,1,62,3,62,1138,8,62,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63, + 1,63,1,63,3,63,1150,8,63,1,64,1,64,1,64,1,64,3,64,1156,8,64,1,64,3,64, + 1159,8,64,1,65,1,65,1,65,5,65,1164,8,65,10,65,12,65,1167,9,65,1,66,1, + 66,1,66,1,66,1,66,1,66,1,66,1,66,1,66,3,66,1178,8,66,1,66,1,66,1,66,1, + 66,3,66,1184,8,66,5,66,1186,8,66,10,66,12,66,1189,9,66,1,67,1,67,1,67, + 3,67,1194,8,67,1,67,1,67,1,68,1,68,1,68,3,68,1201,8,68,1,68,1,68,1,69, + 1,69,1,69,5,69,1208,8,69,10,69,12,69,1211,9,69,1,69,3,69,1214,8,69,1, + 70,1,70,1,71,1,71,1,71,1,71,1,71,1,71,3,71,1224,8,71,3,71,1226,8,71,1, + 72,3,72,1229,8,72,1,72,1,72,1,72,1,72,1,72,1,72,3,72,1237,8,72,1,73,1, + 73,1,73,3,73,1242,8,73,1,74,1,74,1,75,1,75,1,76,1,76,1,77,1,77,3,77,1252, + 8,77,1,78,1,78,1,78,3,78,1257,8,78,1,79,1,79,1,79,1,79,1,80,1,80,1,80, + 1,80,1,81,1,81,3,81,1269,8,81,1,82,1,82,5,82,1273,8,82,10,82,12,82,1276, + 9,82,1,82,1,82,1,83,1,83,1,83,1,83,1,83,3,83,1285,8,83,1,84,1,84,5,84, + 1289,8,84,10,84,12,84,1292,9,84,1,84,1,84,1,85,1,85,1,85,1,85,1,85,3, + 85,1301,8,85,1,85,0,3,78,116,132,86,0,2,4,6,8,10,12,14,16,18,20,22,24, + 26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70, + 
72,74,76,78,80,82,84,86,88,90,92,94,96,98,100,102,104,106,108,110,112, + 114,116,118,120,122,124,126,128,130,132,134,136,138,140,142,144,146,148, + 150,152,154,156,158,160,162,164,166,168,170,0,16,2,0,18,18,74,74,2,0, + 44,44,51,51,3,0,1,1,4,4,8,8,4,0,1,1,3,4,8,8,80,80,2,0,51,51,73,73,2,0, + 1,1,4,4,2,0,7,7,22,23,2,0,30,30,49,49,2,0,71,71,76,76,3,0,10,10,50,50, + 90,90,2,0,41,41,53,53,1,0,107,108,2,0,118,118,139,139,7,0,21,21,38,38, + 55,56,70,70,78,78,97,97,103,103,16,0,1,13,15,20,22,28,30,30,32,37,39, + 42,44,51,53,54,58,58,60,69,71,77,79,83,85,92,94,96,98,99,101,102,4,0, + 20,20,30,30,39,39,48,48,1475,0,175,1,0,0,0,2,182,1,0,0,0,4,184,1,0,0, + 0,6,186,1,0,0,0,8,193,1,0,0,0,10,216,1,0,0,0,12,218,1,0,0,0,14,225,1, + 0,0,0,16,232,1,0,0,0,18,245,1,0,0,0,20,257,1,0,0,0,22,266,1,0,0,0,24, + 274,1,0,0,0,26,296,1,0,0,0,28,311,1,0,0,0,30,320,1,0,0,0,32,325,1,0,0, + 0,34,329,1,0,0,0,36,331,1,0,0,0,38,340,1,0,0,0,40,344,1,0,0,0,42,358, + 1,0,0,0,44,362,1,0,0,0,46,377,1,0,0,0,48,380,1,0,0,0,50,429,1,0,0,0,52, + 432,1,0,0,0,54,438,1,0,0,0,56,442,1,0,0,0,58,448,1,0,0,0,60,466,1,0,0, + 0,62,469,1,0,0,0,64,472,1,0,0,0,66,482,1,0,0,0,68,485,1,0,0,0,70,489, + 1,0,0,0,72,522,1,0,0,0,74,524,1,0,0,0,76,527,1,0,0,0,78,542,1,0,0,0,80, + 604,1,0,0,0,82,609,1,0,0,0,84,620,1,0,0,0,86,622,1,0,0,0,88,628,1,0,0, + 0,90,636,1,0,0,0,92,654,1,0,0,0,94,656,1,0,0,0,96,664,1,0,0,0,98,669, + 1,0,0,0,100,677,1,0,0,0,102,681,1,0,0,0,104,685,1,0,0,0,106,694,1,0,0, + 0,108,708,1,0,0,0,110,710,1,0,0,0,112,769,1,0,0,0,114,771,1,0,0,0,116, + 932,1,0,0,0,118,1074,1,0,0,0,120,1113,1,0,0,0,122,1126,1,0,0,0,124,1128, + 1,0,0,0,126,1149,1,0,0,0,128,1158,1,0,0,0,130,1160,1,0,0,0,132,1177,1, + 0,0,0,134,1190,1,0,0,0,136,1200,1,0,0,0,138,1204,1,0,0,0,140,1215,1,0, + 0,0,142,1225,1,0,0,0,144,1228,1,0,0,0,146,1241,1,0,0,0,148,1243,1,0,0, + 0,150,1245,1,0,0,0,152,1247,1,0,0,0,154,1251,1,0,0,0,156,1256,1,0,0,0, + 158,1258,1,0,0,0,160,1262,1,0,0,0,162,1268,1,0,0,0,164,1270,1,0,0,0,166, + 1284,1,0,0,0,168,1286,1,0,0,0,170,1300,1,0,0,0,172,174,3,2,1,0,173,172, + 1,0,0,0,174,177,1,0,0,0,175,173,1,0,0,0,175,176,1,0,0,0,176,178,1,0,0, + 0,177,175,1,0,0,0,178,179,5,0,0,1,179,1,1,0,0,0,180,183,3,6,3,0,181,183, + 3,10,5,0,182,180,1,0,0,0,182,181,1,0,0,0,183,3,1,0,0,0,184,185,3,116, + 58,0,185,5,1,0,0,0,186,187,5,52,0,0,187,191,3,156,78,0,188,189,5,115, + 0,0,189,190,5,122,0,0,190,192,3,4,2,0,191,188,1,0,0,0,191,192,1,0,0,0, + 192,7,1,0,0,0,193,198,3,156,78,0,194,195,5,116,0,0,195,197,3,156,78,0, + 196,194,1,0,0,0,197,200,1,0,0,0,198,196,1,0,0,0,198,199,1,0,0,0,199,202, + 1,0,0,0,200,198,1,0,0,0,201,203,5,116,0,0,202,201,1,0,0,0,202,203,1,0, + 0,0,203,9,1,0,0,0,204,217,3,12,6,0,205,217,3,14,7,0,206,217,3,18,9,0, + 207,217,3,20,10,0,208,217,3,22,11,0,209,217,3,26,13,0,210,217,3,24,12, + 0,211,217,3,28,14,0,212,217,3,30,15,0,213,217,3,36,18,0,214,217,3,32, + 16,0,215,217,3,34,17,0,216,204,1,0,0,0,216,205,1,0,0,0,216,206,1,0,0, + 0,216,207,1,0,0,0,216,208,1,0,0,0,216,209,1,0,0,0,216,210,1,0,0,0,216, + 211,1,0,0,0,216,212,1,0,0,0,216,213,1,0,0,0,216,214,1,0,0,0,216,215,1, + 0,0,0,217,11,1,0,0,0,218,220,5,72,0,0,219,221,3,4,2,0,220,219,1,0,0,0, + 220,221,1,0,0,0,221,223,1,0,0,0,222,224,5,150,0,0,223,222,1,0,0,0,223, + 224,1,0,0,0,224,13,1,0,0,0,225,227,5,84,0,0,226,228,3,4,2,0,227,226,1, + 0,0,0,227,228,1,0,0,0,228,230,1,0,0,0,229,231,5,150,0,0,230,229,1,0,0, + 0,230,231,1,0,0,0,231,15,1,0,0,0,232,241,5,14,0,0,233,234,5,130,0,0,234, + 237,3,156,78,0,235,236,5,115,0,0,236,238,3,156,78,0,237,235,1,0,0,0,237, + 
238,1,0,0,0,238,239,1,0,0,0,239,240,5,149,0,0,240,242,1,0,0,0,241,233, + 1,0,0,0,241,242,1,0,0,0,242,243,1,0,0,0,243,244,3,36,18,0,244,17,1,0, + 0,0,245,246,5,93,0,0,246,250,3,36,18,0,247,249,3,16,8,0,248,247,1,0,0, + 0,249,252,1,0,0,0,250,248,1,0,0,0,250,251,1,0,0,0,251,255,1,0,0,0,252, + 250,1,0,0,0,253,254,5,29,0,0,254,256,3,36,18,0,255,253,1,0,0,0,255,256, + 1,0,0,0,256,19,1,0,0,0,257,258,5,40,0,0,258,259,5,130,0,0,259,260,3,4, + 2,0,260,261,5,149,0,0,261,264,3,10,5,0,262,263,5,25,0,0,263,265,3,10, + 5,0,264,262,1,0,0,0,264,265,1,0,0,0,265,21,1,0,0,0,266,267,5,100,0,0, + 267,268,5,130,0,0,268,269,3,4,2,0,269,270,5,149,0,0,270,272,3,10,5,0, + 271,273,5,150,0,0,272,271,1,0,0,0,272,273,1,0,0,0,273,23,1,0,0,0,274, + 275,5,33,0,0,275,279,5,130,0,0,276,280,3,6,3,0,277,280,3,30,15,0,278, + 280,3,4,2,0,279,276,1,0,0,0,279,277,1,0,0,0,279,278,1,0,0,0,279,280,1, + 0,0,0,280,281,1,0,0,0,281,283,5,150,0,0,282,284,3,4,2,0,283,282,1,0,0, + 0,283,284,1,0,0,0,284,285,1,0,0,0,285,289,5,150,0,0,286,290,3,6,3,0,287, + 290,3,30,15,0,288,290,3,4,2,0,289,286,1,0,0,0,289,287,1,0,0,0,289,288, + 1,0,0,0,289,290,1,0,0,0,290,291,1,0,0,0,291,292,5,149,0,0,292,294,3,10, + 5,0,293,295,5,150,0,0,294,293,1,0,0,0,294,295,1,0,0,0,295,25,1,0,0,0, + 296,297,5,33,0,0,297,298,5,130,0,0,298,299,5,52,0,0,299,302,3,156,78, + 0,300,301,5,116,0,0,301,303,3,156,78,0,302,300,1,0,0,0,302,303,1,0,0, + 0,303,304,1,0,0,0,304,305,5,42,0,0,305,306,3,4,2,0,306,307,5,149,0,0, + 307,309,3,10,5,0,308,310,5,150,0,0,309,308,1,0,0,0,309,310,1,0,0,0,310, + 27,1,0,0,0,311,312,5,31,0,0,312,313,3,156,78,0,313,315,5,130,0,0,314, + 316,3,8,4,0,315,314,1,0,0,0,315,316,1,0,0,0,316,317,1,0,0,0,317,318,5, + 149,0,0,318,319,3,36,18,0,319,29,1,0,0,0,320,321,3,4,2,0,321,322,5,115, + 0,0,322,323,5,122,0,0,323,324,3,4,2,0,324,31,1,0,0,0,325,327,3,4,2,0, + 326,328,5,150,0,0,327,326,1,0,0,0,327,328,1,0,0,0,328,33,1,0,0,0,329, + 330,5,150,0,0,330,35,1,0,0,0,331,335,5,128,0,0,332,334,3,2,1,0,333,332, + 1,0,0,0,334,337,1,0,0,0,335,333,1,0,0,0,335,336,1,0,0,0,336,338,1,0,0, + 0,337,335,1,0,0,0,338,339,5,147,0,0,339,37,1,0,0,0,340,341,3,4,2,0,341, + 342,5,115,0,0,342,343,3,4,2,0,343,39,1,0,0,0,344,349,3,38,19,0,345,346, + 5,116,0,0,346,348,3,38,19,0,347,345,1,0,0,0,348,351,1,0,0,0,349,347,1, + 0,0,0,349,350,1,0,0,0,350,353,1,0,0,0,351,349,1,0,0,0,352,354,5,116,0, + 0,353,352,1,0,0,0,353,354,1,0,0,0,354,41,1,0,0,0,355,359,3,44,22,0,356, + 359,3,48,24,0,357,359,3,120,60,0,358,355,1,0,0,0,358,356,1,0,0,0,358, + 357,1,0,0,0,359,360,1,0,0,0,360,361,5,0,0,1,361,43,1,0,0,0,362,368,3, + 46,23,0,363,364,5,95,0,0,364,365,5,1,0,0,365,367,3,46,23,0,366,363,1, + 0,0,0,367,370,1,0,0,0,368,366,1,0,0,0,368,369,1,0,0,0,369,45,1,0,0,0, + 370,368,1,0,0,0,371,378,3,48,24,0,372,373,5,130,0,0,373,374,3,44,22,0, + 374,375,5,149,0,0,375,378,1,0,0,0,376,378,3,160,80,0,377,371,1,0,0,0, + 377,372,1,0,0,0,377,376,1,0,0,0,378,47,1,0,0,0,379,381,3,50,25,0,380, + 379,1,0,0,0,380,381,1,0,0,0,381,382,1,0,0,0,382,384,5,79,0,0,383,385, + 5,24,0,0,384,383,1,0,0,0,384,385,1,0,0,0,385,387,1,0,0,0,386,388,3,52, + 26,0,387,386,1,0,0,0,387,388,1,0,0,0,388,389,1,0,0,0,389,391,3,114,57, + 0,390,392,3,54,27,0,391,390,1,0,0,0,391,392,1,0,0,0,392,394,1,0,0,0,393, + 395,3,56,28,0,394,393,1,0,0,0,394,395,1,0,0,0,395,397,1,0,0,0,396,398, + 3,60,30,0,397,396,1,0,0,0,397,398,1,0,0,0,398,400,1,0,0,0,399,401,3,62, + 31,0,400,399,1,0,0,0,400,401,1,0,0,0,401,403,1,0,0,0,402,404,3,64,32, + 0,403,402,1,0,0,0,403,404,1,0,0,0,404,407,1,0,0,0,405,406,5,102,0,0,406, + 
408,7,0,0,0,407,405,1,0,0,0,407,408,1,0,0,0,408,411,1,0,0,0,409,410,5, + 102,0,0,410,412,5,89,0,0,411,409,1,0,0,0,411,412,1,0,0,0,412,414,1,0, + 0,0,413,415,3,66,33,0,414,413,1,0,0,0,414,415,1,0,0,0,415,417,1,0,0,0, + 416,418,3,58,29,0,417,416,1,0,0,0,417,418,1,0,0,0,418,420,1,0,0,0,419, + 421,3,68,34,0,420,419,1,0,0,0,420,421,1,0,0,0,421,424,1,0,0,0,422,425, + 3,72,36,0,423,425,3,74,37,0,424,422,1,0,0,0,424,423,1,0,0,0,424,425,1, + 0,0,0,425,427,1,0,0,0,426,428,3,76,38,0,427,426,1,0,0,0,427,428,1,0,0, + 0,428,49,1,0,0,0,429,430,5,102,0,0,430,431,3,124,62,0,431,51,1,0,0,0, + 432,433,5,88,0,0,433,436,5,108,0,0,434,435,5,102,0,0,435,437,5,85,0,0, + 436,434,1,0,0,0,436,437,1,0,0,0,437,53,1,0,0,0,438,439,5,34,0,0,439,440, + 3,78,39,0,440,55,1,0,0,0,441,443,7,1,0,0,442,441,1,0,0,0,442,443,1,0, + 0,0,443,444,1,0,0,0,444,445,5,5,0,0,445,446,5,47,0,0,446,447,3,114,57, + 0,447,57,1,0,0,0,448,449,5,101,0,0,449,450,3,156,78,0,450,451,5,6,0,0, + 451,452,5,130,0,0,452,453,3,98,49,0,453,463,5,149,0,0,454,455,5,116,0, + 0,455,456,3,156,78,0,456,457,5,6,0,0,457,458,5,130,0,0,458,459,3,98,49, + 0,459,460,5,149,0,0,460,462,1,0,0,0,461,454,1,0,0,0,462,465,1,0,0,0,463, + 461,1,0,0,0,463,464,1,0,0,0,464,59,1,0,0,0,465,463,1,0,0,0,466,467,5, + 69,0,0,467,468,3,116,58,0,468,61,1,0,0,0,469,470,5,99,0,0,470,471,3,116, + 58,0,471,63,1,0,0,0,472,473,5,36,0,0,473,480,5,11,0,0,474,475,7,0,0,0, + 475,476,5,130,0,0,476,477,3,114,57,0,477,478,5,149,0,0,478,481,1,0,0, + 0,479,481,3,114,57,0,480,474,1,0,0,0,480,479,1,0,0,0,481,65,1,0,0,0,482, + 483,5,37,0,0,483,484,3,116,58,0,484,67,1,0,0,0,485,486,5,64,0,0,486,487, + 5,11,0,0,487,488,3,88,44,0,488,69,1,0,0,0,489,490,5,64,0,0,490,491,5, + 11,0,0,491,492,3,114,57,0,492,71,1,0,0,0,493,494,5,54,0,0,494,497,3,116, + 58,0,495,496,5,116,0,0,496,498,3,116,58,0,497,495,1,0,0,0,497,498,1,0, + 0,0,498,503,1,0,0,0,499,500,5,102,0,0,500,504,5,85,0,0,501,502,5,11,0, + 0,502,504,3,114,57,0,503,499,1,0,0,0,503,501,1,0,0,0,503,504,1,0,0,0, + 504,523,1,0,0,0,505,506,5,54,0,0,506,509,3,116,58,0,507,508,5,102,0,0, + 508,510,5,85,0,0,509,507,1,0,0,0,509,510,1,0,0,0,510,511,1,0,0,0,511, + 512,5,61,0,0,512,513,3,116,58,0,513,523,1,0,0,0,514,515,5,54,0,0,515, + 516,3,116,58,0,516,517,5,61,0,0,517,520,3,116,58,0,518,519,5,11,0,0,519, + 521,3,114,57,0,520,518,1,0,0,0,520,521,1,0,0,0,521,523,1,0,0,0,522,493, + 1,0,0,0,522,505,1,0,0,0,522,514,1,0,0,0,523,73,1,0,0,0,524,525,5,61,0, + 0,525,526,3,116,58,0,526,75,1,0,0,0,527,528,5,81,0,0,528,529,3,94,47, + 0,529,77,1,0,0,0,530,531,6,39,-1,0,531,533,3,132,66,0,532,534,5,28,0, + 0,533,532,1,0,0,0,533,534,1,0,0,0,534,536,1,0,0,0,535,537,3,86,43,0,536, + 535,1,0,0,0,536,537,1,0,0,0,537,543,1,0,0,0,538,539,5,130,0,0,539,540, + 3,78,39,0,540,541,5,149,0,0,541,543,1,0,0,0,542,530,1,0,0,0,542,538,1, + 0,0,0,543,558,1,0,0,0,544,545,10,3,0,0,545,546,3,82,41,0,546,547,3,78, + 39,4,547,557,1,0,0,0,548,550,10,4,0,0,549,551,3,80,40,0,550,549,1,0,0, + 0,550,551,1,0,0,0,551,552,1,0,0,0,552,553,5,47,0,0,553,554,3,78,39,0, + 554,555,3,84,42,0,555,557,1,0,0,0,556,544,1,0,0,0,556,548,1,0,0,0,557, + 560,1,0,0,0,558,556,1,0,0,0,558,559,1,0,0,0,559,79,1,0,0,0,560,558,1, + 0,0,0,561,563,7,2,0,0,562,561,1,0,0,0,562,563,1,0,0,0,563,564,1,0,0,0, + 564,571,5,44,0,0,565,567,5,44,0,0,566,568,7,2,0,0,567,566,1,0,0,0,567, + 568,1,0,0,0,568,571,1,0,0,0,569,571,7,2,0,0,570,562,1,0,0,0,570,565,1, + 0,0,0,570,569,1,0,0,0,571,605,1,0,0,0,572,574,7,3,0,0,573,572,1,0,0,0, + 573,574,1,0,0,0,574,575,1,0,0,0,575,577,7,4,0,0,576,578,5,65,0,0,577, + 
576,1,0,0,0,577,578,1,0,0,0,578,587,1,0,0,0,579,581,7,4,0,0,580,582,5, + 65,0,0,581,580,1,0,0,0,581,582,1,0,0,0,582,584,1,0,0,0,583,585,7,3,0, + 0,584,583,1,0,0,0,584,585,1,0,0,0,585,587,1,0,0,0,586,573,1,0,0,0,586, + 579,1,0,0,0,587,605,1,0,0,0,588,590,7,5,0,0,589,588,1,0,0,0,589,590,1, + 0,0,0,590,591,1,0,0,0,591,593,5,35,0,0,592,594,5,65,0,0,593,592,1,0,0, + 0,593,594,1,0,0,0,594,603,1,0,0,0,595,597,5,35,0,0,596,598,5,65,0,0,597, + 596,1,0,0,0,597,598,1,0,0,0,598,600,1,0,0,0,599,601,7,5,0,0,600,599,1, + 0,0,0,600,601,1,0,0,0,601,603,1,0,0,0,602,589,1,0,0,0,602,595,1,0,0,0, + 603,605,1,0,0,0,604,570,1,0,0,0,604,586,1,0,0,0,604,602,1,0,0,0,605,81, + 1,0,0,0,606,607,5,17,0,0,607,610,5,47,0,0,608,610,5,116,0,0,609,606,1, + 0,0,0,609,608,1,0,0,0,610,83,1,0,0,0,611,612,5,62,0,0,612,621,3,114,57, + 0,613,614,5,96,0,0,614,615,5,130,0,0,615,616,3,114,57,0,616,617,5,149, + 0,0,617,621,1,0,0,0,618,619,5,96,0,0,619,621,3,114,57,0,620,611,1,0,0, + 0,620,613,1,0,0,0,620,618,1,0,0,0,621,85,1,0,0,0,622,623,5,77,0,0,623, + 626,3,92,46,0,624,625,5,61,0,0,625,627,3,92,46,0,626,624,1,0,0,0,626, + 627,1,0,0,0,627,87,1,0,0,0,628,633,3,90,45,0,629,630,5,116,0,0,630,632, + 3,90,45,0,631,629,1,0,0,0,632,635,1,0,0,0,633,631,1,0,0,0,633,634,1,0, + 0,0,634,89,1,0,0,0,635,633,1,0,0,0,636,638,3,116,58,0,637,639,7,6,0,0, + 638,637,1,0,0,0,638,639,1,0,0,0,639,642,1,0,0,0,640,641,5,60,0,0,641, + 643,7,7,0,0,642,640,1,0,0,0,642,643,1,0,0,0,643,646,1,0,0,0,644,645,5, + 16,0,0,645,647,5,110,0,0,646,644,1,0,0,0,646,647,1,0,0,0,647,91,1,0,0, + 0,648,655,3,160,80,0,649,652,3,144,72,0,650,651,5,151,0,0,651,653,3,144, + 72,0,652,650,1,0,0,0,652,653,1,0,0,0,653,655,1,0,0,0,654,648,1,0,0,0, + 654,649,1,0,0,0,655,93,1,0,0,0,656,661,3,96,48,0,657,658,5,116,0,0,658, + 660,3,96,48,0,659,657,1,0,0,0,660,663,1,0,0,0,661,659,1,0,0,0,661,662, + 1,0,0,0,662,95,1,0,0,0,663,661,1,0,0,0,664,665,3,156,78,0,665,666,5,122, + 0,0,666,667,3,146,73,0,667,97,1,0,0,0,668,670,3,100,50,0,669,668,1,0, + 0,0,669,670,1,0,0,0,670,672,1,0,0,0,671,673,3,102,51,0,672,671,1,0,0, + 0,672,673,1,0,0,0,673,675,1,0,0,0,674,676,3,104,52,0,675,674,1,0,0,0, + 675,676,1,0,0,0,676,99,1,0,0,0,677,678,5,67,0,0,678,679,5,11,0,0,679, + 680,3,114,57,0,680,101,1,0,0,0,681,682,5,64,0,0,682,683,5,11,0,0,683, + 684,3,88,44,0,684,103,1,0,0,0,685,686,7,8,0,0,686,687,3,106,53,0,687, + 105,1,0,0,0,688,695,3,108,54,0,689,690,5,9,0,0,690,691,3,108,54,0,691, + 692,5,2,0,0,692,693,3,108,54,0,693,695,1,0,0,0,694,688,1,0,0,0,694,689, + 1,0,0,0,695,107,1,0,0,0,696,697,5,19,0,0,697,709,5,75,0,0,698,699,5,94, + 0,0,699,709,5,68,0,0,700,701,5,94,0,0,701,709,5,32,0,0,702,703,3,144, + 72,0,703,704,5,68,0,0,704,709,1,0,0,0,705,706,3,144,72,0,706,707,5,32, + 0,0,707,709,1,0,0,0,708,696,1,0,0,0,708,698,1,0,0,0,708,700,1,0,0,0,708, + 702,1,0,0,0,708,705,1,0,0,0,709,109,1,0,0,0,710,711,3,116,58,0,711,712, + 5,0,0,1,712,111,1,0,0,0,713,770,3,156,78,0,714,715,3,156,78,0,715,716, + 5,130,0,0,716,717,3,156,78,0,717,724,3,112,56,0,718,719,5,116,0,0,719, + 720,3,156,78,0,720,721,3,112,56,0,721,723,1,0,0,0,722,718,1,0,0,0,723, + 726,1,0,0,0,724,722,1,0,0,0,724,725,1,0,0,0,725,728,1,0,0,0,726,724,1, + 0,0,0,727,729,5,116,0,0,728,727,1,0,0,0,728,729,1,0,0,0,729,730,1,0,0, + 0,730,731,5,149,0,0,731,770,1,0,0,0,732,733,3,156,78,0,733,734,5,130, + 0,0,734,739,3,158,79,0,735,736,5,116,0,0,736,738,3,158,79,0,737,735,1, + 0,0,0,738,741,1,0,0,0,739,737,1,0,0,0,739,740,1,0,0,0,740,743,1,0,0,0, + 741,739,1,0,0,0,742,744,5,116,0,0,743,742,1,0,0,0,743,744,1,0,0,0,744, + 
745,1,0,0,0,745,746,5,149,0,0,746,770,1,0,0,0,747,748,3,156,78,0,748, + 749,5,130,0,0,749,754,3,112,56,0,750,751,5,116,0,0,751,753,3,112,56,0, + 752,750,1,0,0,0,753,756,1,0,0,0,754,752,1,0,0,0,754,755,1,0,0,0,755,758, + 1,0,0,0,756,754,1,0,0,0,757,759,5,116,0,0,758,757,1,0,0,0,758,759,1,0, + 0,0,759,760,1,0,0,0,760,761,5,149,0,0,761,770,1,0,0,0,762,763,3,156,78, + 0,763,765,5,130,0,0,764,766,3,114,57,0,765,764,1,0,0,0,765,766,1,0,0, + 0,766,767,1,0,0,0,767,768,5,149,0,0,768,770,1,0,0,0,769,713,1,0,0,0,769, + 714,1,0,0,0,769,732,1,0,0,0,769,747,1,0,0,0,769,762,1,0,0,0,770,113,1, + 0,0,0,771,776,3,116,58,0,772,773,5,116,0,0,773,775,3,116,58,0,774,772, + 1,0,0,0,775,778,1,0,0,0,776,774,1,0,0,0,776,777,1,0,0,0,777,780,1,0,0, + 0,778,776,1,0,0,0,779,781,5,116,0,0,780,779,1,0,0,0,780,781,1,0,0,0,781, + 115,1,0,0,0,782,783,6,58,-1,0,783,785,5,12,0,0,784,786,3,116,58,0,785, + 784,1,0,0,0,785,786,1,0,0,0,786,792,1,0,0,0,787,788,5,98,0,0,788,789, + 3,116,58,0,789,790,5,83,0,0,790,791,3,116,58,0,791,793,1,0,0,0,792,787, + 1,0,0,0,793,794,1,0,0,0,794,792,1,0,0,0,794,795,1,0,0,0,795,798,1,0,0, + 0,796,797,5,25,0,0,797,799,3,116,58,0,798,796,1,0,0,0,798,799,1,0,0,0, + 799,800,1,0,0,0,800,801,5,26,0,0,801,933,1,0,0,0,802,803,5,13,0,0,803, + 804,5,130,0,0,804,805,3,116,58,0,805,806,5,6,0,0,806,807,3,112,56,0,807, + 808,5,149,0,0,808,933,1,0,0,0,809,810,5,20,0,0,810,933,5,110,0,0,811, + 812,5,45,0,0,812,813,3,116,58,0,813,814,3,148,74,0,814,933,1,0,0,0,815, + 816,5,82,0,0,816,817,5,130,0,0,817,818,3,116,58,0,818,819,5,34,0,0,819, + 822,3,116,58,0,820,821,5,33,0,0,821,823,3,116,58,0,822,820,1,0,0,0,822, + 823,1,0,0,0,823,824,1,0,0,0,824,825,5,149,0,0,825,933,1,0,0,0,826,827, + 5,86,0,0,827,933,5,110,0,0,828,829,5,91,0,0,829,830,5,130,0,0,830,831, + 7,9,0,0,831,832,3,162,81,0,832,833,5,34,0,0,833,834,3,116,58,0,834,835, + 5,149,0,0,835,933,1,0,0,0,836,837,3,156,78,0,837,839,5,130,0,0,838,840, + 3,114,57,0,839,838,1,0,0,0,839,840,1,0,0,0,840,841,1,0,0,0,841,842,5, + 149,0,0,842,851,1,0,0,0,843,845,5,130,0,0,844,846,5,24,0,0,845,844,1, + 0,0,0,845,846,1,0,0,0,846,848,1,0,0,0,847,849,3,114,57,0,848,847,1,0, + 0,0,848,849,1,0,0,0,849,850,1,0,0,0,850,852,5,149,0,0,851,843,1,0,0,0, + 851,852,1,0,0,0,852,853,1,0,0,0,853,854,5,66,0,0,854,855,5,130,0,0,855, + 856,3,98,49,0,856,857,5,149,0,0,857,933,1,0,0,0,858,859,3,156,78,0,859, + 861,5,130,0,0,860,862,3,114,57,0,861,860,1,0,0,0,861,862,1,0,0,0,862, + 863,1,0,0,0,863,864,5,149,0,0,864,873,1,0,0,0,865,867,5,130,0,0,866,868, + 5,24,0,0,867,866,1,0,0,0,867,868,1,0,0,0,868,870,1,0,0,0,869,871,3,114, + 57,0,870,869,1,0,0,0,870,871,1,0,0,0,871,872,1,0,0,0,872,874,5,149,0, + 0,873,865,1,0,0,0,873,874,1,0,0,0,874,875,1,0,0,0,875,876,5,66,0,0,876, + 877,3,156,78,0,877,933,1,0,0,0,878,884,3,156,78,0,879,881,5,130,0,0,880, + 882,3,114,57,0,881,880,1,0,0,0,881,882,1,0,0,0,882,883,1,0,0,0,883,885, + 5,149,0,0,884,879,1,0,0,0,884,885,1,0,0,0,885,886,1,0,0,0,886,888,5,130, + 0,0,887,889,5,24,0,0,888,887,1,0,0,0,888,889,1,0,0,0,889,891,1,0,0,0, + 890,892,3,114,57,0,891,890,1,0,0,0,891,892,1,0,0,0,892,893,1,0,0,0,893, + 894,5,149,0,0,894,933,1,0,0,0,895,933,3,120,60,0,896,933,3,164,82,0,897, + 933,3,146,73,0,898,899,5,118,0,0,899,933,3,116,58,20,900,901,5,58,0,0, + 901,933,3,116,58,14,902,903,3,136,68,0,903,904,5,120,0,0,904,906,1,0, + 0,0,905,902,1,0,0,0,905,906,1,0,0,0,906,907,1,0,0,0,907,933,5,112,0,0, + 908,909,5,130,0,0,909,910,3,44,22,0,910,911,5,149,0,0,911,933,1,0,0,0, + 912,913,5,130,0,0,913,914,3,116,58,0,914,915,5,149,0,0,915,933,1,0,0, + 
0,916,917,5,130,0,0,917,918,3,114,57,0,918,919,5,149,0,0,919,933,1,0, + 0,0,920,922,5,129,0,0,921,923,3,114,57,0,922,921,1,0,0,0,922,923,1,0, + 0,0,923,924,1,0,0,0,924,933,5,148,0,0,925,927,5,128,0,0,926,928,3,40, + 20,0,927,926,1,0,0,0,927,928,1,0,0,0,928,929,1,0,0,0,929,933,5,147,0, + 0,930,933,3,118,59,0,931,933,3,128,64,0,932,782,1,0,0,0,932,802,1,0,0, + 0,932,809,1,0,0,0,932,811,1,0,0,0,932,815,1,0,0,0,932,826,1,0,0,0,932, + 828,1,0,0,0,932,836,1,0,0,0,932,858,1,0,0,0,932,878,1,0,0,0,932,895,1, + 0,0,0,932,896,1,0,0,0,932,897,1,0,0,0,932,898,1,0,0,0,932,900,1,0,0,0, + 932,905,1,0,0,0,932,908,1,0,0,0,932,912,1,0,0,0,932,916,1,0,0,0,932,920, + 1,0,0,0,932,925,1,0,0,0,932,930,1,0,0,0,932,931,1,0,0,0,933,1044,1,0, + 0,0,934,938,10,19,0,0,935,939,5,112,0,0,936,939,5,151,0,0,937,939,5,138, + 0,0,938,935,1,0,0,0,938,936,1,0,0,0,938,937,1,0,0,0,939,940,1,0,0,0,940, + 1043,3,116,58,20,941,945,10,18,0,0,942,946,5,139,0,0,943,946,5,118,0, + 0,944,946,5,117,0,0,945,942,1,0,0,0,945,943,1,0,0,0,945,944,1,0,0,0,946, + 947,1,0,0,0,947,1043,3,116,58,19,948,973,10,17,0,0,949,974,5,121,0,0, + 950,974,5,122,0,0,951,974,5,133,0,0,952,974,5,131,0,0,953,974,5,132,0, + 0,954,974,5,123,0,0,955,974,5,124,0,0,956,958,5,58,0,0,957,956,1,0,0, + 0,957,958,1,0,0,0,958,959,1,0,0,0,959,961,5,42,0,0,960,962,5,15,0,0,961, + 960,1,0,0,0,961,962,1,0,0,0,962,974,1,0,0,0,963,965,5,58,0,0,964,963, + 1,0,0,0,964,965,1,0,0,0,965,966,1,0,0,0,966,974,7,10,0,0,967,974,5,145, + 0,0,968,974,5,146,0,0,969,974,5,135,0,0,970,974,5,126,0,0,971,974,5,127, + 0,0,972,974,5,134,0,0,973,949,1,0,0,0,973,950,1,0,0,0,973,951,1,0,0,0, + 973,952,1,0,0,0,973,953,1,0,0,0,973,954,1,0,0,0,973,955,1,0,0,0,973,957, + 1,0,0,0,973,964,1,0,0,0,973,967,1,0,0,0,973,968,1,0,0,0,973,969,1,0,0, + 0,973,970,1,0,0,0,973,971,1,0,0,0,973,972,1,0,0,0,974,975,1,0,0,0,975, + 1043,3,116,58,18,976,977,10,15,0,0,977,978,5,137,0,0,978,1043,3,116,58, + 16,979,980,10,13,0,0,980,981,5,2,0,0,981,1043,3,116,58,14,982,983,10, + 12,0,0,983,984,5,63,0,0,984,1043,3,116,58,13,985,987,10,11,0,0,986,988, + 5,58,0,0,987,986,1,0,0,0,987,988,1,0,0,0,988,989,1,0,0,0,989,990,5,9, + 0,0,990,991,3,116,58,0,991,992,5,2,0,0,992,993,3,116,58,12,993,1043,1, + 0,0,0,994,995,10,10,0,0,995,996,5,140,0,0,996,997,3,116,58,0,997,998, + 5,115,0,0,998,999,3,116,58,10,999,1043,1,0,0,0,1000,1001,10,30,0,0,1001, + 1003,5,130,0,0,1002,1004,3,114,57,0,1003,1002,1,0,0,0,1003,1004,1,0,0, + 0,1004,1005,1,0,0,0,1005,1043,5,149,0,0,1006,1007,10,26,0,0,1007,1008, + 5,129,0,0,1008,1009,3,116,58,0,1009,1010,5,148,0,0,1010,1043,1,0,0,0, + 1011,1012,10,25,0,0,1012,1013,5,120,0,0,1013,1043,5,108,0,0,1014,1015, + 10,24,0,0,1015,1016,5,120,0,0,1016,1043,3,156,78,0,1017,1018,10,23,0, + 0,1018,1019,5,136,0,0,1019,1020,5,129,0,0,1020,1021,3,116,58,0,1021,1022, + 5,148,0,0,1022,1043,1,0,0,0,1023,1024,10,22,0,0,1024,1025,5,136,0,0,1025, + 1043,5,108,0,0,1026,1027,10,21,0,0,1027,1028,5,136,0,0,1028,1043,3,156, + 78,0,1029,1030,10,16,0,0,1030,1032,5,46,0,0,1031,1033,5,58,0,0,1032,1031, + 1,0,0,0,1032,1033,1,0,0,0,1033,1034,1,0,0,0,1034,1043,5,59,0,0,1035,1040, + 10,9,0,0,1036,1037,5,6,0,0,1037,1041,3,156,78,0,1038,1039,5,6,0,0,1039, + 1041,5,110,0,0,1040,1036,1,0,0,0,1040,1038,1,0,0,0,1041,1043,1,0,0,0, + 1042,934,1,0,0,0,1042,941,1,0,0,0,1042,948,1,0,0,0,1042,976,1,0,0,0,1042, + 979,1,0,0,0,1042,982,1,0,0,0,1042,985,1,0,0,0,1042,994,1,0,0,0,1042,1000, + 1,0,0,0,1042,1006,1,0,0,0,1042,1011,1,0,0,0,1042,1014,1,0,0,0,1042,1017, + 1,0,0,0,1042,1023,1,0,0,0,1042,1026,1,0,0,0,1042,1029,1,0,0,0,1042,1035, + 
1,0,0,0,1043,1046,1,0,0,0,1044,1042,1,0,0,0,1044,1045,1,0,0,0,1045,117, + 1,0,0,0,1046,1044,1,0,0,0,1047,1048,5,130,0,0,1048,1053,3,156,78,0,1049, + 1050,5,116,0,0,1050,1052,3,156,78,0,1051,1049,1,0,0,0,1052,1055,1,0,0, + 0,1053,1051,1,0,0,0,1053,1054,1,0,0,0,1054,1057,1,0,0,0,1055,1053,1,0, + 0,0,1056,1058,5,116,0,0,1057,1056,1,0,0,0,1057,1058,1,0,0,0,1058,1059, + 1,0,0,0,1059,1060,5,149,0,0,1060,1075,1,0,0,0,1061,1066,3,156,78,0,1062, + 1063,5,116,0,0,1063,1065,3,156,78,0,1064,1062,1,0,0,0,1065,1068,1,0,0, + 0,1066,1064,1,0,0,0,1066,1067,1,0,0,0,1067,1070,1,0,0,0,1068,1066,1,0, + 0,0,1069,1071,5,116,0,0,1070,1069,1,0,0,0,1070,1071,1,0,0,0,1071,1075, + 1,0,0,0,1072,1073,5,130,0,0,1073,1075,5,149,0,0,1074,1047,1,0,0,0,1074, + 1061,1,0,0,0,1074,1072,1,0,0,0,1075,1076,1,0,0,0,1076,1079,5,111,0,0, + 1077,1080,3,116,58,0,1078,1080,3,36,18,0,1079,1077,1,0,0,0,1079,1078, + 1,0,0,0,1080,119,1,0,0,0,1081,1082,5,132,0,0,1082,1086,3,156,78,0,1083, + 1085,3,122,61,0,1084,1083,1,0,0,0,1085,1088,1,0,0,0,1086,1084,1,0,0,0, + 1086,1087,1,0,0,0,1087,1089,1,0,0,0,1088,1086,1,0,0,0,1089,1090,5,151, + 0,0,1090,1091,5,124,0,0,1091,1114,1,0,0,0,1092,1093,5,132,0,0,1093,1097, + 3,156,78,0,1094,1096,3,122,61,0,1095,1094,1,0,0,0,1096,1099,1,0,0,0,1097, + 1095,1,0,0,0,1097,1098,1,0,0,0,1098,1100,1,0,0,0,1099,1097,1,0,0,0,1100, + 1106,5,124,0,0,1101,1107,3,120,60,0,1102,1103,5,128,0,0,1103,1104,3,116, + 58,0,1104,1105,5,147,0,0,1105,1107,1,0,0,0,1106,1101,1,0,0,0,1106,1102, + 1,0,0,0,1106,1107,1,0,0,0,1107,1108,1,0,0,0,1108,1109,5,132,0,0,1109, + 1110,5,151,0,0,1110,1111,3,156,78,0,1111,1112,5,124,0,0,1112,1114,1,0, + 0,0,1113,1081,1,0,0,0,1113,1092,1,0,0,0,1114,121,1,0,0,0,1115,1116,3, + 156,78,0,1116,1117,5,122,0,0,1117,1118,3,162,81,0,1118,1127,1,0,0,0,1119, + 1120,3,156,78,0,1120,1121,5,122,0,0,1121,1122,5,128,0,0,1122,1123,3,116, + 58,0,1123,1124,5,147,0,0,1124,1127,1,0,0,0,1125,1127,3,156,78,0,1126, + 1115,1,0,0,0,1126,1119,1,0,0,0,1126,1125,1,0,0,0,1127,123,1,0,0,0,1128, + 1133,3,126,63,0,1129,1130,5,116,0,0,1130,1132,3,126,63,0,1131,1129,1, + 0,0,0,1132,1135,1,0,0,0,1133,1131,1,0,0,0,1133,1134,1,0,0,0,1134,1137, + 1,0,0,0,1135,1133,1,0,0,0,1136,1138,5,116,0,0,1137,1136,1,0,0,0,1137, + 1138,1,0,0,0,1138,125,1,0,0,0,1139,1140,3,156,78,0,1140,1141,5,6,0,0, + 1141,1142,5,130,0,0,1142,1143,3,44,22,0,1143,1144,5,149,0,0,1144,1150, + 1,0,0,0,1145,1146,3,116,58,0,1146,1147,5,6,0,0,1147,1148,3,156,78,0,1148, + 1150,1,0,0,0,1149,1139,1,0,0,0,1149,1145,1,0,0,0,1150,127,1,0,0,0,1151, + 1159,3,160,80,0,1152,1153,3,136,68,0,1153,1154,5,120,0,0,1154,1156,1, + 0,0,0,1155,1152,1,0,0,0,1155,1156,1,0,0,0,1156,1157,1,0,0,0,1157,1159, + 3,130,65,0,1158,1151,1,0,0,0,1158,1155,1,0,0,0,1159,129,1,0,0,0,1160, + 1165,3,156,78,0,1161,1162,5,120,0,0,1162,1164,3,156,78,0,1163,1161,1, + 0,0,0,1164,1167,1,0,0,0,1165,1163,1,0,0,0,1165,1166,1,0,0,0,1166,131, + 1,0,0,0,1167,1165,1,0,0,0,1168,1169,6,66,-1,0,1169,1178,3,136,68,0,1170, + 1178,3,134,67,0,1171,1172,5,130,0,0,1172,1173,3,44,22,0,1173,1174,5,149, + 0,0,1174,1178,1,0,0,0,1175,1178,3,120,60,0,1176,1178,3,160,80,0,1177, + 1168,1,0,0,0,1177,1170,1,0,0,0,1177,1171,1,0,0,0,1177,1175,1,0,0,0,1177, + 1176,1,0,0,0,1178,1187,1,0,0,0,1179,1183,10,3,0,0,1180,1184,3,154,77, + 0,1181,1182,5,6,0,0,1182,1184,3,156,78,0,1183,1180,1,0,0,0,1183,1181, + 1,0,0,0,1184,1186,1,0,0,0,1185,1179,1,0,0,0,1186,1189,1,0,0,0,1187,1185, + 1,0,0,0,1187,1188,1,0,0,0,1188,133,1,0,0,0,1189,1187,1,0,0,0,1190,1191, + 3,156,78,0,1191,1193,5,130,0,0,1192,1194,3,138,69,0,1193,1192,1,0,0,0, + 
1193,1194,1,0,0,0,1194,1195,1,0,0,0,1195,1196,5,149,0,0,1196,135,1,0, + 0,0,1197,1198,3,140,70,0,1198,1199,5,120,0,0,1199,1201,1,0,0,0,1200,1197, + 1,0,0,0,1200,1201,1,0,0,0,1201,1202,1,0,0,0,1202,1203,3,156,78,0,1203, + 137,1,0,0,0,1204,1209,3,116,58,0,1205,1206,5,116,0,0,1206,1208,3,116, + 58,0,1207,1205,1,0,0,0,1208,1211,1,0,0,0,1209,1207,1,0,0,0,1209,1210, + 1,0,0,0,1210,1213,1,0,0,0,1211,1209,1,0,0,0,1212,1214,5,116,0,0,1213, + 1212,1,0,0,0,1213,1214,1,0,0,0,1214,139,1,0,0,0,1215,1216,3,156,78,0, + 1216,141,1,0,0,0,1217,1226,5,106,0,0,1218,1219,5,120,0,0,1219,1226,7, + 11,0,0,1220,1221,5,108,0,0,1221,1223,5,120,0,0,1222,1224,7,11,0,0,1223, + 1222,1,0,0,0,1223,1224,1,0,0,0,1224,1226,1,0,0,0,1225,1217,1,0,0,0,1225, + 1218,1,0,0,0,1225,1220,1,0,0,0,1226,143,1,0,0,0,1227,1229,7,12,0,0,1228, + 1227,1,0,0,0,1228,1229,1,0,0,0,1229,1236,1,0,0,0,1230,1237,3,142,71,0, + 1231,1237,5,107,0,0,1232,1237,5,108,0,0,1233,1237,5,109,0,0,1234,1237, + 5,43,0,0,1235,1237,5,57,0,0,1236,1230,1,0,0,0,1236,1231,1,0,0,0,1236, + 1232,1,0,0,0,1236,1233,1,0,0,0,1236,1234,1,0,0,0,1236,1235,1,0,0,0,1237, + 145,1,0,0,0,1238,1242,3,144,72,0,1239,1242,5,110,0,0,1240,1242,5,59,0, + 0,1241,1238,1,0,0,0,1241,1239,1,0,0,0,1241,1240,1,0,0,0,1242,147,1,0, + 0,0,1243,1244,7,13,0,0,1244,149,1,0,0,0,1245,1246,7,14,0,0,1246,151,1, + 0,0,0,1247,1248,7,15,0,0,1248,153,1,0,0,0,1249,1252,5,105,0,0,1250,1252, + 3,152,76,0,1251,1249,1,0,0,0,1251,1250,1,0,0,0,1252,155,1,0,0,0,1253, + 1257,5,105,0,0,1254,1257,3,148,74,0,1255,1257,3,150,75,0,1256,1253,1, + 0,0,0,1256,1254,1,0,0,0,1256,1255,1,0,0,0,1257,157,1,0,0,0,1258,1259, + 3,162,81,0,1259,1260,5,122,0,0,1260,1261,3,144,72,0,1261,159,1,0,0,0, + 1262,1263,5,128,0,0,1263,1264,3,130,65,0,1264,1265,5,147,0,0,1265,161, + 1,0,0,0,1266,1269,5,110,0,0,1267,1269,3,164,82,0,1268,1266,1,0,0,0,1268, + 1267,1,0,0,0,1269,163,1,0,0,0,1270,1274,5,142,0,0,1271,1273,3,166,83, + 0,1272,1271,1,0,0,0,1273,1276,1,0,0,0,1274,1272,1,0,0,0,1274,1275,1,0, + 0,0,1275,1277,1,0,0,0,1276,1274,1,0,0,0,1277,1278,5,144,0,0,1278,165, + 1,0,0,0,1279,1280,5,157,0,0,1280,1281,3,116,58,0,1281,1282,5,147,0,0, + 1282,1285,1,0,0,0,1283,1285,5,156,0,0,1284,1279,1,0,0,0,1284,1283,1,0, + 0,0,1285,167,1,0,0,0,1286,1290,5,143,0,0,1287,1289,3,170,85,0,1288,1287, + 1,0,0,0,1289,1292,1,0,0,0,1290,1288,1,0,0,0,1290,1291,1,0,0,0,1291,1293, + 1,0,0,0,1292,1290,1,0,0,0,1293,1294,5,0,0,1,1294,169,1,0,0,0,1295,1296, + 5,159,0,0,1296,1297,3,116,58,0,1297,1298,5,147,0,0,1298,1301,1,0,0,0, + 1299,1301,5,158,0,0,1300,1295,1,0,0,0,1300,1299,1,0,0,0,1301,171,1,0, + 0,0,167,175,182,191,198,202,216,220,223,227,230,237,241,250,255,264,272, + 279,283,289,294,302,309,315,327,335,349,353,358,368,377,380,384,387,391, + 394,397,400,403,407,411,414,417,420,424,427,436,442,463,480,497,503,509, + 520,522,533,536,542,550,556,558,562,567,570,573,577,581,584,586,589,593, + 597,600,602,604,609,620,626,633,638,642,646,652,654,661,669,672,675,694, + 708,724,728,739,743,754,758,765,769,776,780,785,794,798,822,839,845,848, + 851,861,867,870,873,881,884,888,891,905,922,927,932,938,945,957,961,964, + 973,987,1003,1032,1040,1042,1044,1053,1057,1066,1070,1074,1079,1086,1097, + 1106,1113,1126,1133,1137,1149,1155,1158,1165,1177,1183,1187,1193,1200, + 1209,1213,1223,1225,1228,1236,1241,1251,1256,1268,1274,1284,1290,1300 }; staticData->serializedATN = antlr4::atn::SerializedATNView(serializedATNSegment, sizeof(serializedATNSegment) / sizeof(serializedATNSegment[0])); @@ -615,7 +611,7 @@ void hogqlparserParserInitialize() { for (size_t i = 0; i < count; i++) { 
staticData->decisionToDFA.emplace_back(staticData->atn->getDecisionState(i), i); } - hogqlparserParserStaticData = staticData.release(); + hogqlparserParserStaticData = std::move(staticData); } } @@ -697,20 +693,20 @@ HogQLParser::ProgramContext* HogQLParser::program() { }); try { enterOuterAlt(_localctx, 1); - setState(179); + setState(175); _errHandler->sync(this); _la = _input->LA(1); while ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & -536887298) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493105500848127) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 4212759) != 0)) { - setState(176); + setState(172); declaration(); - setState(181); + setState(177); _errHandler->sync(this); _la = _input->LA(1); } - setState(182); + setState(178); match(HogQLParser::EOF); } @@ -762,12 +758,12 @@ HogQLParser::DeclarationContext* HogQLParser::declaration() { exitRule(); }); try { - setState(186); + setState(182); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::LET: { enterOuterAlt(_localctx, 1); - setState(184); + setState(180); varDecl(); break; } @@ -889,7 +885,7 @@ HogQLParser::DeclarationContext* HogQLParser::declaration() { case HogQLParser::QUOTE_SINGLE_TEMPLATE: case HogQLParser::SEMICOLON: { enterOuterAlt(_localctx, 2); - setState(185); + setState(181); statement(); break; } @@ -944,7 +940,7 @@ HogQLParser::ExpressionContext* HogQLParser::expression() { }); try { enterOuterAlt(_localctx, 1); - setState(188); + setState(184); columnExpr(0); } @@ -1010,20 +1006,20 @@ HogQLParser::VarDeclContext* HogQLParser::varDecl() { }); try { enterOuterAlt(_localctx, 1); - setState(190); + setState(186); match(HogQLParser::LET); - setState(191); + setState(187); identifier(); - setState(195); + setState(191); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COLON) { - setState(192); + setState(188); match(HogQLParser::COLON); - setState(193); + setState(189); match(HogQLParser::EQ_SINGLE); - setState(194); + setState(190); expression(); } @@ -1087,28 +1083,28 @@ HogQLParser::IdentifierListContext* HogQLParser::identifierList() { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(197); + setState(193); identifier(); - setState(202); + setState(198); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 3, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(198); + setState(194); match(HogQLParser::COMMA); - setState(199); + setState(195); identifier(); } - setState(204); + setState(200); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 3, _ctx); } - setState(206); + setState(202); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COMMA) { - setState(205); + setState(201); match(HogQLParser::COMMA); } @@ -1201,89 +1197,89 @@ HogQLParser::StatementContext* HogQLParser::statement() { exitRule(); }); try { - setState(220); + setState(216); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 5, _ctx)) { case 1: { enterOuterAlt(_localctx, 1); - setState(208); + setState(204); returnStmt(); break; } case 2: { enterOuterAlt(_localctx, 2); - setState(209); + setState(205); throwStmt(); break; } case 3: { enterOuterAlt(_localctx, 3); - setState(210); + setState(206); tryCatchStmt(); break; } case 4: { enterOuterAlt(_localctx, 4); - setState(211); + setState(207); ifStmt(); break; } case 5: { enterOuterAlt(_localctx, 5); - setState(212); + setState(208); whileStmt(); break; } case 6: { 
enterOuterAlt(_localctx, 6); - setState(213); + setState(209); forInStmt(); break; } case 7: { enterOuterAlt(_localctx, 7); - setState(214); + setState(210); forStmt(); break; } case 8: { enterOuterAlt(_localctx, 8); - setState(215); + setState(211); funcStmt(); break; } case 9: { enterOuterAlt(_localctx, 9); - setState(216); + setState(212); varAssignment(); break; } case 10: { enterOuterAlt(_localctx, 10); - setState(217); + setState(213); block(); break; } case 11: { enterOuterAlt(_localctx, 11); - setState(218); + setState(214); exprStmt(); break; } case 12: { enterOuterAlt(_localctx, 12); - setState(219); + setState(215); emptyStmt(); break; } @@ -1346,14 +1342,14 @@ HogQLParser::ReturnStmtContext* HogQLParser::returnStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(222); + setState(218); match(HogQLParser::RETURN); - setState(224); + setState(220); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 6, _ctx)) { case 1: { - setState(223); + setState(219); expression(); break; } @@ -1361,12 +1357,12 @@ HogQLParser::ReturnStmtContext* HogQLParser::returnStmt() { default: break; } - setState(227); + setState(223); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 7, _ctx)) { case 1: { - setState(226); + setState(222); match(HogQLParser::SEMICOLON); break; } @@ -1429,14 +1425,14 @@ HogQLParser::ThrowStmtContext* HogQLParser::throwStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(229); + setState(225); match(HogQLParser::THROW); - setState(231); + setState(227); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 8, _ctx)) { case 1: { - setState(230); + setState(226); expression(); break; } @@ -1444,12 +1440,12 @@ HogQLParser::ThrowStmtContext* HogQLParser::throwStmt() { default: break; } - setState(234); + setState(230); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 9, _ctx)) { case 1: { - setState(233); + setState(229); match(HogQLParser::SEMICOLON); break; } @@ -1529,31 +1525,31 @@ HogQLParser::CatchBlockContext* HogQLParser::catchBlock() { }); try { enterOuterAlt(_localctx, 1); - setState(236); + setState(232); match(HogQLParser::CATCH); - setState(245); + setState(241); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::LPAREN) { - setState(237); + setState(233); match(HogQLParser::LPAREN); - setState(238); + setState(234); antlrcpp::downCast(_localctx)->catchVar = identifier(); - setState(241); + setState(237); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COLON) { - setState(239); + setState(235); match(HogQLParser::COLON); - setState(240); + setState(236); antlrcpp::downCast(_localctx)->catchType = identifier(); } - setState(243); + setState(239); match(HogQLParser::RPAREN); } - setState(247); + setState(243); antlrcpp::downCast(_localctx)->catchStmt = block(); } @@ -1623,28 +1619,28 @@ HogQLParser::TryCatchStmtContext* HogQLParser::tryCatchStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(249); + setState(245); match(HogQLParser::TRY); - setState(250); + setState(246); antlrcpp::downCast(_localctx)->tryStmt = block(); - setState(254); + setState(250); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::CATCH) { - setState(251); + setState(247); catchBlock(); - setState(256); + setState(252); _errHandler->sync(this); _la = _input->LA(1); } - setState(259); + setState(255); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::FINALLY) { - setState(257); + setState(253); 
match(HogQLParser::FINALLY); - setState(258); + setState(254); antlrcpp::downCast(_localctx)->finallyStmt = block(); } @@ -1718,24 +1714,24 @@ HogQLParser::IfStmtContext* HogQLParser::ifStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(261); + setState(257); match(HogQLParser::IF); - setState(262); + setState(258); match(HogQLParser::LPAREN); - setState(263); + setState(259); expression(); - setState(264); + setState(260); match(HogQLParser::RPAREN); - setState(265); + setState(261); statement(); - setState(268); + setState(264); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 14, _ctx)) { case 1: { - setState(266); + setState(262); match(HogQLParser::ELSE); - setState(267); + setState(263); statement(); break; } @@ -1810,22 +1806,22 @@ HogQLParser::WhileStmtContext* HogQLParser::whileStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(270); + setState(266); match(HogQLParser::WHILE); - setState(271); + setState(267); match(HogQLParser::LPAREN); - setState(272); + setState(268); expression(); - setState(273); + setState(269); match(HogQLParser::RPAREN); - setState(274); + setState(270); statement(); - setState(276); + setState(272); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 15, _ctx)) { case 1: { - setState(275); + setState(271); match(HogQLParser::SEMICOLON); break; } @@ -1925,28 +1921,28 @@ HogQLParser::ForStmtContext* HogQLParser::forStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(278); + setState(274); match(HogQLParser::FOR); - setState(279); + setState(275); match(HogQLParser::LPAREN); - setState(283); + setState(279); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 16, _ctx)) { case 1: { - setState(280); + setState(276); antlrcpp::downCast(_localctx)->initializerVarDeclr = varDecl(); break; } case 2: { - setState(281); + setState(277); antlrcpp::downCast(_localctx)->initializerVarAssignment = varAssignment(); break; } case 3: { - setState(282); + setState(278); antlrcpp::downCast(_localctx)->initializerExpression = expression(); break; } @@ -1954,9 +1950,9 @@ HogQLParser::ForStmtContext* HogQLParser::forStmt() { default: break; } - setState(285); + setState(281); match(HogQLParser::SEMICOLON); - setState(287); + setState(283); _errHandler->sync(this); _la = _input->LA(1); @@ -1964,29 +1960,29 @@ HogQLParser::ForStmtContext* HogQLParser::forStmt() { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(286); + setState(282); antlrcpp::downCast(_localctx)->condition = expression(); } - setState(289); + setState(285); match(HogQLParser::SEMICOLON); - setState(293); + setState(289); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 18, _ctx)) { case 1: { - setState(290); + setState(286); antlrcpp::downCast(_localctx)->incrementVarDeclr = varDecl(); break; } case 2: { - setState(291); + setState(287); antlrcpp::downCast(_localctx)->incrementVarAssignment = varAssignment(); break; } case 3: { - setState(292); + setState(288); antlrcpp::downCast(_localctx)->incrementExpression = expression(); break; } @@ -1994,16 +1990,16 @@ HogQLParser::ForStmtContext* HogQLParser::forStmt() { default: break; } - setState(295); + setState(291); match(HogQLParser::RPAREN); - setState(296); + setState(292); statement(); - setState(298); + setState(294); _errHandler->sync(this); switch 
(getInterpreter()->adaptivePredict(_input, 19, _ctx)) { case 1: { - setState(297); + setState(293); match(HogQLParser::SEMICOLON); break; } @@ -2099,38 +2095,38 @@ HogQLParser::ForInStmtContext* HogQLParser::forInStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(300); + setState(296); match(HogQLParser::FOR); - setState(301); + setState(297); match(HogQLParser::LPAREN); - setState(302); + setState(298); match(HogQLParser::LET); - setState(303); + setState(299); identifier(); - setState(306); + setState(302); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COMMA) { - setState(304); + setState(300); match(HogQLParser::COMMA); - setState(305); + setState(301); identifier(); } - setState(308); + setState(304); match(HogQLParser::IN); - setState(309); + setState(305); expression(); - setState(310); + setState(306); match(HogQLParser::RPAREN); - setState(311); + setState(307); statement(); - setState(313); + setState(309); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 21, _ctx)) { case 1: { - setState(312); + setState(308); match(HogQLParser::SEMICOLON); break; } @@ -2206,25 +2202,25 @@ HogQLParser::FuncStmtContext* HogQLParser::funcStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(315); + setState(311); match(HogQLParser::FN); - setState(316); + setState(312); identifier(); - setState(317); + setState(313); match(HogQLParser::LPAREN); - setState(319); + setState(315); _errHandler->sync(this); _la = _input->LA(1); if ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & -725088338784043010) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 3229277487103) != 0)) { - setState(318); + setState(314); identifierList(); } - setState(321); + setState(317); match(HogQLParser::RPAREN); - setState(322); + setState(318); block(); } @@ -2285,13 +2281,13 @@ HogQLParser::VarAssignmentContext* HogQLParser::varAssignment() { }); try { enterOuterAlt(_localctx, 1); - setState(324); + setState(320); expression(); - setState(325); + setState(321); match(HogQLParser::COLON); - setState(326); + setState(322); match(HogQLParser::EQ_SINGLE); - setState(327); + setState(323); expression(); } @@ -2344,14 +2340,14 @@ HogQLParser::ExprStmtContext* HogQLParser::exprStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(329); + setState(325); expression(); - setState(331); + setState(327); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 23, _ctx)) { case 1: { - setState(330); + setState(326); match(HogQLParser::SEMICOLON); break; } @@ -2406,7 +2402,7 @@ HogQLParser::EmptyStmtContext* HogQLParser::emptyStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(333); + setState(329); match(HogQLParser::SEMICOLON); } @@ -2468,22 +2464,22 @@ HogQLParser::BlockContext* HogQLParser::block() { }); try { enterOuterAlt(_localctx, 1); - setState(335); + setState(331); match(HogQLParser::LBRACE); - setState(339); + setState(335); _errHandler->sync(this); _la = _input->LA(1); while ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & -536887298) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493105500848127) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 4212759) != 0)) { - setState(336); + setState(332); declaration(); - setState(341); + setState(337); _errHandler->sync(this); _la = _input->LA(1); } - setState(342); + setState(338); match(HogQLParser::RBRACE); } @@ -2540,11 +2536,11 @@ HogQLParser::KvPairContext* HogQLParser::kvPair() { }); try { enterOuterAlt(_localctx, 1); - 
setState(344); + setState(340); expression(); - setState(345); + setState(341); match(HogQLParser::COLON); - setState(346); + setState(342); expression(); } @@ -2607,28 +2603,28 @@ HogQLParser::KvPairListContext* HogQLParser::kvPairList() { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(348); + setState(344); kvPair(); - setState(353); + setState(349); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 25, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(349); + setState(345); match(HogQLParser::COMMA); - setState(350); + setState(346); kvPair(); } - setState(355); + setState(351); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 25, _ctx); } - setState(357); + setState(353); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COMMA) { - setState(356); + setState(352); match(HogQLParser::COMMA); } @@ -2690,23 +2686,23 @@ HogQLParser::SelectContext* HogQLParser::select() { }); try { enterOuterAlt(_localctx, 1); - setState(362); + setState(358); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 27, _ctx)) { case 1: { - setState(359); + setState(355); selectUnionStmt(); break; } case 2: { - setState(360); + setState(356); selectStmt(); break; } case 3: { - setState(361); + setState(357); hogqlxTagElement(); break; } @@ -2714,7 +2710,7 @@ HogQLParser::SelectContext* HogQLParser::select() { default: break; } - setState(364); + setState(360); match(HogQLParser::EOF); } @@ -2784,19 +2780,19 @@ HogQLParser::SelectUnionStmtContext* HogQLParser::selectUnionStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(366); + setState(362); selectStmtWithParens(); - setState(372); + setState(368); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::UNION) { - setState(367); + setState(363); match(HogQLParser::UNION); - setState(368); + setState(364); match(HogQLParser::ALL); - setState(369); + setState(365); selectStmtWithParens(); - setState(374); + setState(370); _errHandler->sync(this); _la = _input->LA(1); } @@ -2862,31 +2858,31 @@ HogQLParser::SelectStmtWithParensContext* HogQLParser::selectStmtWithParens() { exitRule(); }); try { - setState(381); + setState(377); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::SELECT: case HogQLParser::WITH: { enterOuterAlt(_localctx, 1); - setState(375); + setState(371); selectStmt(); break; } case HogQLParser::LPAREN: { enterOuterAlt(_localctx, 2); - setState(376); + setState(372); match(HogQLParser::LPAREN); - setState(377); + setState(373); selectUnionStmt(); - setState(378); + setState(374); match(HogQLParser::RPAREN); break; } case HogQLParser::LBRACE: { enterOuterAlt(_localctx, 3); - setState(380); + setState(376); placeholder(); break; } @@ -3022,22 +3018,22 @@ HogQLParser::SelectStmtContext* HogQLParser::selectStmt() { }); try { enterOuterAlt(_localctx, 1); - setState(384); + setState(380); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::WITH) { - setState(383); + setState(379); antlrcpp::downCast(_localctx)->with = withClause(); } - setState(386); + setState(382); match(HogQLParser::SELECT); - setState(388); + setState(384); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 31, _ctx)) { case 1: { - setState(387); + setState(383); match(HogQLParser::DISTINCT); break; } @@ -3045,12 +3041,12 @@ HogQLParser::SelectStmtContext* HogQLParser::selectStmt() { default: break; } - setState(391); + setState(387); _errHandler->sync(this); 
switch (getInterpreter()->adaptivePredict(_input, 32, _ctx)) { case 1: { - setState(390); + setState(386); topClause(); break; } @@ -3058,57 +3054,57 @@ HogQLParser::SelectStmtContext* HogQLParser::selectStmt() { default: break; } - setState(393); + setState(389); antlrcpp::downCast(_localctx)->columns = columnExprList(); - setState(395); + setState(391); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::FROM) { - setState(394); + setState(390); antlrcpp::downCast(_localctx)->from = fromClause(); } - setState(398); + setState(394); _errHandler->sync(this); _la = _input->LA(1); if ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 2269391999729696) != 0)) { - setState(397); + setState(393); arrayJoinClause(); } - setState(401); + setState(397); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::PREWHERE) { - setState(400); + setState(396); prewhereClause(); } - setState(404); + setState(400); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::WHERE) { - setState(403); + setState(399); antlrcpp::downCast(_localctx)->where = whereClause(); } - setState(407); + setState(403); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::GROUP) { - setState(406); + setState(402); groupByClause(); } - setState(411); + setState(407); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 38, _ctx)) { case 1: { - setState(409); + setState(405); match(HogQLParser::WITH); - setState(410); + setState(406); _la = _input->LA(1); if (!(_la == HogQLParser::CUBE @@ -3125,51 +3121,51 @@ HogQLParser::SelectStmtContext* HogQLParser::selectStmt() { default: break; } - setState(415); + setState(411); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::WITH) { - setState(413); + setState(409); match(HogQLParser::WITH); - setState(414); + setState(410); match(HogQLParser::TOTALS); } - setState(418); + setState(414); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::HAVING) { - setState(417); + setState(413); havingClause(); } - setState(421); + setState(417); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::WINDOW) { - setState(420); + setState(416); windowClause(); } - setState(424); + setState(420); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::ORDER) { - setState(423); + setState(419); orderByClause(); } - setState(428); + setState(424); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::LIMIT: { - setState(426); + setState(422); limitAndOffsetClause(); break; } case HogQLParser::OFFSET: { - setState(427); + setState(423); offsetOnlyClause(); break; } @@ -3184,12 +3180,12 @@ HogQLParser::SelectStmtContext* HogQLParser::selectStmt() { default: break; } - setState(431); + setState(427); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::SETTINGS) { - setState(430); + setState(426); settingsClause(); } @@ -3243,9 +3239,9 @@ HogQLParser::WithClauseContext* HogQLParser::withClause() { }); try { enterOuterAlt(_localctx, 1); - setState(433); + setState(429); match(HogQLParser::WITH); - setState(434); + setState(430); withExprList(); } @@ -3306,18 +3302,18 @@ HogQLParser::TopClauseContext* HogQLParser::topClause() { }); try { enterOuterAlt(_localctx, 1); - setState(436); + setState(432); match(HogQLParser::TOP); - setState(437); + setState(433); match(HogQLParser::DECIMAL_LITERAL); - setState(440); + setState(436); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 45, _ctx)) { case 1: { - 
setState(438); + setState(434); match(HogQLParser::WITH); - setState(439); + setState(435); match(HogQLParser::TIES); break; } @@ -3376,9 +3372,9 @@ HogQLParser::FromClauseContext* HogQLParser::fromClause() { }); try { enterOuterAlt(_localctx, 1); - setState(442); + setState(438); match(HogQLParser::FROM); - setState(443); + setState(439); joinExpr(0); } @@ -3444,14 +3440,14 @@ HogQLParser::ArrayJoinClauseContext* HogQLParser::arrayJoinClause() { }); try { enterOuterAlt(_localctx, 1); - setState(446); + setState(442); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::INNER || _la == HogQLParser::LEFT) { - setState(445); + setState(441); _la = _input->LA(1); if (!(_la == HogQLParser::INNER @@ -3463,11 +3459,11 @@ HogQLParser::ArrayJoinClauseContext* HogQLParser::arrayJoinClause() { consume(); } } - setState(448); + setState(444); match(HogQLParser::ARRAY); - setState(449); + setState(445); match(HogQLParser::JOIN); - setState(450); + setState(446); columnExprList(); } @@ -3565,35 +3561,35 @@ HogQLParser::WindowClauseContext* HogQLParser::windowClause() { }); try { enterOuterAlt(_localctx, 1); - setState(452); + setState(448); match(HogQLParser::WINDOW); - setState(453); + setState(449); identifier(); - setState(454); + setState(450); match(HogQLParser::AS); - setState(455); + setState(451); match(HogQLParser::LPAREN); - setState(456); + setState(452); windowExpr(); - setState(457); + setState(453); match(HogQLParser::RPAREN); - setState(467); + setState(463); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::COMMA) { - setState(458); + setState(454); match(HogQLParser::COMMA); - setState(459); + setState(455); identifier(); - setState(460); + setState(456); match(HogQLParser::AS); - setState(461); + setState(457); match(HogQLParser::LPAREN); - setState(462); + setState(458); windowExpr(); - setState(463); + setState(459); match(HogQLParser::RPAREN); - setState(469); + setState(465); _errHandler->sync(this); _la = _input->LA(1); } @@ -3648,9 +3644,9 @@ HogQLParser::PrewhereClauseContext* HogQLParser::prewhereClause() { }); try { enterOuterAlt(_localctx, 1); - setState(470); + setState(466); match(HogQLParser::PREWHERE); - setState(471); + setState(467); columnExpr(0); } @@ -3703,9 +3699,9 @@ HogQLParser::WhereClauseContext* HogQLParser::whereClause() { }); try { enterOuterAlt(_localctx, 1); - setState(473); + setState(469); match(HogQLParser::WHERE); - setState(474); + setState(470); columnExpr(0); } @@ -3779,15 +3775,15 @@ HogQLParser::GroupByClauseContext* HogQLParser::groupByClause() { }); try { enterOuterAlt(_localctx, 1); - setState(476); + setState(472); match(HogQLParser::GROUP); - setState(477); + setState(473); match(HogQLParser::BY); - setState(484); + setState(480); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 48, _ctx)) { case 1: { - setState(478); + setState(474); _la = _input->LA(1); if (!(_la == HogQLParser::CUBE @@ -3798,17 +3794,17 @@ HogQLParser::GroupByClauseContext* HogQLParser::groupByClause() { _errHandler->reportMatch(this); consume(); } - setState(479); + setState(475); match(HogQLParser::LPAREN); - setState(480); + setState(476); columnExprList(); - setState(481); + setState(477); match(HogQLParser::RPAREN); break; } case 2: { - setState(483); + setState(479); columnExprList(); break; } @@ -3867,9 +3863,9 @@ HogQLParser::HavingClauseContext* HogQLParser::havingClause() { }); try { enterOuterAlt(_localctx, 1); - setState(486); + setState(482); match(HogQLParser::HAVING); - setState(487); + 
setState(483); columnExpr(0); } @@ -3926,11 +3922,11 @@ HogQLParser::OrderByClauseContext* HogQLParser::orderByClause() { }); try { enterOuterAlt(_localctx, 1); - setState(489); + setState(485); match(HogQLParser::ORDER); - setState(490); + setState(486); match(HogQLParser::BY); - setState(491); + setState(487); orderExprList(); } @@ -3987,11 +3983,11 @@ HogQLParser::ProjectionOrderByClauseContext* HogQLParser::projectionOrderByClaus }); try { enterOuterAlt(_localctx, 1); - setState(493); + setState(489); match(HogQLParser::ORDER); - setState(494); + setState(490); match(HogQLParser::BY); - setState(495); + setState(491); columnExprList(); } @@ -4072,40 +4068,40 @@ HogQLParser::LimitAndOffsetClauseContext* HogQLParser::limitAndOffsetClause() { exitRule(); }); try { - setState(526); + setState(522); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 53, _ctx)) { case 1: { enterOuterAlt(_localctx, 1); - setState(497); + setState(493); match(HogQLParser::LIMIT); - setState(498); + setState(494); columnExpr(0); - setState(501); + setState(497); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COMMA) { - setState(499); + setState(495); match(HogQLParser::COMMA); - setState(500); + setState(496); columnExpr(0); } - setState(507); + setState(503); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::WITH: { - setState(503); + setState(499); match(HogQLParser::WITH); - setState(504); + setState(500); match(HogQLParser::TIES); break; } case HogQLParser::BY: { - setState(505); + setState(501); match(HogQLParser::BY); - setState(506); + setState(502); columnExprList(); break; } @@ -4125,45 +4121,45 @@ HogQLParser::LimitAndOffsetClauseContext* HogQLParser::limitAndOffsetClause() { case 2: { enterOuterAlt(_localctx, 2); - setState(509); + setState(505); match(HogQLParser::LIMIT); - setState(510); + setState(506); columnExpr(0); - setState(513); + setState(509); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::WITH) { - setState(511); + setState(507); match(HogQLParser::WITH); - setState(512); + setState(508); match(HogQLParser::TIES); } - setState(515); + setState(511); match(HogQLParser::OFFSET); - setState(516); + setState(512); columnExpr(0); break; } case 3: { enterOuterAlt(_localctx, 3); - setState(518); + setState(514); match(HogQLParser::LIMIT); - setState(519); + setState(515); columnExpr(0); - setState(520); + setState(516); match(HogQLParser::OFFSET); - setState(521); + setState(517); columnExpr(0); - setState(524); + setState(520); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::BY) { - setState(522); + setState(518); match(HogQLParser::BY); - setState(523); + setState(519); columnExprList(); } break; @@ -4223,9 +4219,9 @@ HogQLParser::OffsetOnlyClauseContext* HogQLParser::offsetOnlyClause() { }); try { enterOuterAlt(_localctx, 1); - setState(528); + setState(524); match(HogQLParser::OFFSET); - setState(529); + setState(525); columnExpr(0); } @@ -4278,9 +4274,9 @@ HogQLParser::SettingsClauseContext* HogQLParser::settingsClause() { }); try { enterOuterAlt(_localctx, 1); - setState(531); + setState(527); match(HogQLParser::SETTINGS); - setState(532); + setState(528); settingExprList(); } @@ -4434,7 +4430,7 @@ HogQLParser::JoinExprContext* HogQLParser::joinExpr(int precedence) { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(546); + setState(542); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 56, _ctx)) { case 1: { @@ -4442,14 +4438,14 @@ 
HogQLParser::JoinExprContext* HogQLParser::joinExpr(int precedence) { _ctx = _localctx; previousContext = _localctx; - setState(535); + setState(531); tableExpr(0); - setState(537); + setState(533); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 54, _ctx)) { case 1: { - setState(536); + setState(532); match(HogQLParser::FINAL); break; } @@ -4457,12 +4453,12 @@ HogQLParser::JoinExprContext* HogQLParser::joinExpr(int precedence) { default: break; } - setState(540); + setState(536); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 55, _ctx)) { case 1: { - setState(539); + setState(535); sampleClause(); break; } @@ -4477,11 +4473,11 @@ HogQLParser::JoinExprContext* HogQLParser::joinExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(542); + setState(538); match(HogQLParser::LPAREN); - setState(543); + setState(539); joinExpr(0); - setState(544); + setState(540); match(HogQLParser::RPAREN); break; } @@ -4490,7 +4486,7 @@ HogQLParser::JoinExprContext* HogQLParser::joinExpr(int precedence) { break; } _ctx->stop = _input->LT(-1); - setState(562); + setState(558); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 59, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { @@ -4498,19 +4494,19 @@ HogQLParser::JoinExprContext* HogQLParser::joinExpr(int precedence) { if (!_parseListeners.empty()) triggerExitRuleEvent(); previousContext = _localctx; - setState(560); + setState(556); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 58, _ctx)) { case 1: { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleJoinExpr); - setState(548); + setState(544); if (!(precpred(_ctx, 3))) throw FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(549); + setState(545); joinOpCross(); - setState(550); + setState(546); joinExpr(4); break; } @@ -4519,10 +4515,10 @@ HogQLParser::JoinExprContext* HogQLParser::joinExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleJoinExpr); - setState(552); + setState(548); if (!(precpred(_ctx, 4))) throw FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(554); + setState(550); _errHandler->sync(this); _la = _input->LA(1); @@ -4530,14 +4526,14 @@ HogQLParser::JoinExprContext* HogQLParser::joinExpr(int precedence) { ((1ULL << _la) & 2269426359468314) != 0) || _la == HogQLParser::RIGHT || _la == HogQLParser::SEMI) { - setState(553); + setState(549); joinOp(); } - setState(556); + setState(552); match(HogQLParser::JOIN); - setState(557); + setState(553); joinExpr(0); - setState(558); + setState(554); joinConstraintClause(); break; } @@ -4546,7 +4542,7 @@ HogQLParser::JoinExprContext* HogQLParser::joinExpr(int precedence) { break; } } - setState(564); + setState(560); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 59, _ctx); } @@ -4684,23 +4680,23 @@ HogQLParser::JoinOpContext* HogQLParser::joinOp() { exitRule(); }); try { - setState(608); + setState(604); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 73, _ctx)) { case 1: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 1); - setState(574); + setState(570); _errHandler->sync(this); switch 
(getInterpreter()->adaptivePredict(_input, 62, _ctx)) { case 1: { - setState(566); + setState(562); _errHandler->sync(this); _la = _input->LA(1); if ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 274) != 0)) { - setState(565); + setState(561); _la = _input->LA(1); if (!((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 274) != 0))) { @@ -4711,21 +4707,21 @@ HogQLParser::JoinOpContext* HogQLParser::joinOp() { consume(); } } - setState(568); + setState(564); match(HogQLParser::INNER); break; } case 2: { - setState(569); + setState(565); match(HogQLParser::INNER); - setState(571); + setState(567); _errHandler->sync(this); _la = _input->LA(1); if ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 274) != 0)) { - setState(570); + setState(566); _la = _input->LA(1); if (!((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 274) != 0))) { @@ -4740,7 +4736,7 @@ HogQLParser::JoinOpContext* HogQLParser::joinOp() { } case 3: { - setState(573); + setState(569); _la = _input->LA(1); if (!((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 274) != 0))) { @@ -4762,17 +4758,17 @@ HogQLParser::JoinOpContext* HogQLParser::joinOp() { case 2: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 2); - setState(590); + setState(586); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 67, _ctx)) { case 1: { - setState(577); + setState(573); _errHandler->sync(this); _la = _input->LA(1); if ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 282) != 0) || _la == HogQLParser::SEMI) { - setState(576); + setState(572); _la = _input->LA(1); if (!((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 282) != 0) || _la == HogQLParser::SEMI)) { @@ -4783,7 +4779,7 @@ HogQLParser::JoinOpContext* HogQLParser::joinOp() { consume(); } } - setState(579); + setState(575); _la = _input->LA(1); if (!(_la == HogQLParser::LEFT @@ -4794,19 +4790,19 @@ HogQLParser::JoinOpContext* HogQLParser::joinOp() { _errHandler->reportMatch(this); consume(); } - setState(581); + setState(577); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::OUTER) { - setState(580); + setState(576); match(HogQLParser::OUTER); } break; } case 2: { - setState(583); + setState(579); _la = _input->LA(1); if (!(_la == HogQLParser::LEFT @@ -4817,21 +4813,21 @@ HogQLParser::JoinOpContext* HogQLParser::joinOp() { _errHandler->reportMatch(this); consume(); } - setState(585); + setState(581); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::OUTER) { - setState(584); + setState(580); match(HogQLParser::OUTER); } - setState(588); + setState(584); _errHandler->sync(this); _la = _input->LA(1); if ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 282) != 0) || _la == HogQLParser::SEMI) { - setState(587); + setState(583); _la = _input->LA(1); if (!((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 282) != 0) || _la == HogQLParser::SEMI)) { @@ -4854,18 +4850,18 @@ HogQLParser::JoinOpContext* HogQLParser::joinOp() { case 3: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 3); - setState(606); + setState(602); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 72, _ctx)) { case 1: { - setState(593); + setState(589); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::ALL || _la == HogQLParser::ANY) { - setState(592); + setState(588); _la = _input->LA(1); if (!(_la == HogQLParser::ALL @@ -4877,38 +4873,38 @@ HogQLParser::JoinOpContext* HogQLParser::joinOp() { consume(); } } - setState(595); + setState(591); match(HogQLParser::FULL); - setState(597); + setState(593); 
_errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::OUTER) { - setState(596); + setState(592); match(HogQLParser::OUTER); } break; } case 2: { - setState(599); + setState(595); match(HogQLParser::FULL); - setState(601); + setState(597); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::OUTER) { - setState(600); + setState(596); match(HogQLParser::OUTER); } - setState(604); + setState(600); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::ALL || _la == HogQLParser::ANY) { - setState(603); + setState(599); _la = _input->LA(1); if (!(_la == HogQLParser::ALL @@ -4986,21 +4982,21 @@ HogQLParser::JoinOpCrossContext* HogQLParser::joinOpCross() { exitRule(); }); try { - setState(613); + setState(609); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::CROSS: { enterOuterAlt(_localctx, 1); - setState(610); + setState(606); match(HogQLParser::CROSS); - setState(611); + setState(607); match(HogQLParser::JOIN); break; } case HogQLParser::COMMA: { enterOuterAlt(_localctx, 2); - setState(612); + setState(608); match(HogQLParser::COMMA); break; } @@ -5070,36 +5066,36 @@ HogQLParser::JoinConstraintClauseContext* HogQLParser::joinConstraintClause() { exitRule(); }); try { - setState(624); + setState(620); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 75, _ctx)) { case 1: { enterOuterAlt(_localctx, 1); - setState(615); + setState(611); match(HogQLParser::ON); - setState(616); + setState(612); columnExprList(); break; } case 2: { enterOuterAlt(_localctx, 2); - setState(617); + setState(613); match(HogQLParser::USING); - setState(618); + setState(614); match(HogQLParser::LPAREN); - setState(619); + setState(615); columnExprList(); - setState(620); + setState(616); match(HogQLParser::RPAREN); break; } case 3: { enterOuterAlt(_localctx, 3); - setState(622); + setState(618); match(HogQLParser::USING); - setState(623); + setState(619); columnExprList(); break; } @@ -5166,18 +5162,18 @@ HogQLParser::SampleClauseContext* HogQLParser::sampleClause() { }); try { enterOuterAlt(_localctx, 1); - setState(626); + setState(622); match(HogQLParser::SAMPLE); - setState(627); + setState(623); ratioExpr(); - setState(630); + setState(626); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 76, _ctx)) { case 1: { - setState(628); + setState(624); match(HogQLParser::OFFSET); - setState(629); + setState(625); ratioExpr(); break; } @@ -5245,17 +5241,17 @@ HogQLParser::OrderExprListContext* HogQLParser::orderExprList() { }); try { enterOuterAlt(_localctx, 1); - setState(632); + setState(628); orderExpr(); - setState(637); + setState(633); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::COMMA) { - setState(633); + setState(629); match(HogQLParser::COMMA); - setState(634); + setState(630); orderExpr(); - setState(639); + setState(635); _errHandler->sync(this); _la = _input->LA(1); } @@ -5339,15 +5335,15 @@ HogQLParser::OrderExprContext* HogQLParser::orderExpr() { }); try { enterOuterAlt(_localctx, 1); - setState(640); + setState(636); columnExpr(0); - setState(642); + setState(638); _errHandler->sync(this); _la = _input->LA(1); if ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 12583040) != 0)) { - setState(641); + setState(637); _la = _input->LA(1); if (!((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 12583040) != 0))) { @@ -5358,14 +5354,14 @@ HogQLParser::OrderExprContext* HogQLParser::orderExpr() { consume(); } } - setState(646); + setState(642); _errHandler->sync(this); _la = 
_input->LA(1); if (_la == HogQLParser::NULLS) { - setState(644); + setState(640); match(HogQLParser::NULLS); - setState(645); + setState(641); _la = _input->LA(1); if (!(_la == HogQLParser::FIRST @@ -5377,14 +5373,14 @@ HogQLParser::OrderExprContext* HogQLParser::orderExpr() { consume(); } } - setState(650); + setState(646); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COLLATE) { - setState(648); + setState(644); match(HogQLParser::COLLATE); - setState(649); + setState(645); match(HogQLParser::STRING_LITERAL); } @@ -5445,12 +5441,12 @@ HogQLParser::RatioExprContext* HogQLParser::ratioExpr() { exitRule(); }); try { - setState(658); + setState(654); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::LBRACE: { enterOuterAlt(_localctx, 1); - setState(652); + setState(648); placeholder(); break; } @@ -5465,16 +5461,16 @@ HogQLParser::RatioExprContext* HogQLParser::ratioExpr() { case HogQLParser::DOT: case HogQLParser::PLUS: { enterOuterAlt(_localctx, 2); - setState(653); + setState(649); numberLiteral(); - setState(656); + setState(652); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 81, _ctx)) { case 1: { - setState(654); + setState(650); match(HogQLParser::SLASH); - setState(655); + setState(651); numberLiteral(); break; } @@ -5548,17 +5544,17 @@ HogQLParser::SettingExprListContext* HogQLParser::settingExprList() { }); try { enterOuterAlt(_localctx, 1); - setState(660); + setState(656); settingExpr(); - setState(665); + setState(661); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::COMMA) { - setState(661); + setState(657); match(HogQLParser::COMMA); - setState(662); + setState(658); settingExpr(); - setState(667); + setState(663); _errHandler->sync(this); _la = _input->LA(1); } @@ -5617,11 +5613,11 @@ HogQLParser::SettingExprContext* HogQLParser::settingExpr() { }); try { enterOuterAlt(_localctx, 1); - setState(668); + setState(664); identifier(); - setState(669); + setState(665); match(HogQLParser::EQ_SINGLE); - setState(670); + setState(666); literal(); } @@ -5679,30 +5675,30 @@ HogQLParser::WindowExprContext* HogQLParser::windowExpr() { }); try { enterOuterAlt(_localctx, 1); - setState(673); + setState(669); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::PARTITION) { - setState(672); + setState(668); winPartitionByClause(); } - setState(676); + setState(672); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::ORDER) { - setState(675); + setState(671); winOrderByClause(); } - setState(679); + setState(675); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::RANGE || _la == HogQLParser::ROWS) { - setState(678); + setState(674); winFrameClause(); } @@ -5760,11 +5756,11 @@ HogQLParser::WinPartitionByClauseContext* HogQLParser::winPartitionByClause() { }); try { enterOuterAlt(_localctx, 1); - setState(681); + setState(677); match(HogQLParser::PARTITION); - setState(682); + setState(678); match(HogQLParser::BY); - setState(683); + setState(679); columnExprList(); } @@ -5821,11 +5817,11 @@ HogQLParser::WinOrderByClauseContext* HogQLParser::winOrderByClause() { }); try { enterOuterAlt(_localctx, 1); - setState(685); + setState(681); match(HogQLParser::ORDER); - setState(686); + setState(682); match(HogQLParser::BY); - setState(687); + setState(683); orderExprList(); } @@ -5883,7 +5879,7 @@ HogQLParser::WinFrameClauseContext* HogQLParser::winFrameClause() { }); try { enterOuterAlt(_localctx, 1); - setState(689); + setState(685); _la = 
_input->LA(1); if (!(_la == HogQLParser::RANGE @@ -5894,7 +5890,7 @@ HogQLParser::WinFrameClauseContext* HogQLParser::winFrameClause() { _errHandler->reportMatch(this); consume(); } - setState(690); + setState(686); winFrameExtend(); } @@ -5976,7 +5972,7 @@ HogQLParser::WinFrameExtendContext* HogQLParser::winFrameExtend() { exitRule(); }); try { - setState(698); + setState(694); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::CURRENT: @@ -5992,7 +5988,7 @@ HogQLParser::WinFrameExtendContext* HogQLParser::winFrameExtend() { case HogQLParser::PLUS: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 1); - setState(692); + setState(688); winFrameBound(); break; } @@ -6000,13 +5996,13 @@ HogQLParser::WinFrameExtendContext* HogQLParser::winFrameExtend() { case HogQLParser::BETWEEN: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 2); - setState(693); + setState(689); match(HogQLParser::BETWEEN); - setState(694); + setState(690); winFrameBound(); - setState(695); + setState(691); match(HogQLParser::AND); - setState(696); + setState(692); winFrameBound(); break; } @@ -6081,45 +6077,45 @@ HogQLParser::WinFrameBoundContext* HogQLParser::winFrameBound() { }); try { enterOuterAlt(_localctx, 1); - setState(712); + setState(708); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 88, _ctx)) { case 1: { - setState(700); + setState(696); match(HogQLParser::CURRENT); - setState(701); + setState(697); match(HogQLParser::ROW); break; } case 2: { - setState(702); + setState(698); match(HogQLParser::UNBOUNDED); - setState(703); + setState(699); match(HogQLParser::PRECEDING); break; } case 3: { - setState(704); + setState(700); match(HogQLParser::UNBOUNDED); - setState(705); + setState(701); match(HogQLParser::FOLLOWING); break; } case 4: { - setState(706); + setState(702); numberLiteral(); - setState(707); + setState(703); match(HogQLParser::PRECEDING); break; } case 5: { - setState(709); + setState(705); numberLiteral(); - setState(710); + setState(706); match(HogQLParser::FOLLOWING); break; } @@ -6178,9 +6174,9 @@ HogQLParser::ExprContext* HogQLParser::expr() { }); try { enterOuterAlt(_localctx, 1); - setState(714); + setState(710); columnExpr(0); - setState(715); + setState(711); match(HogQLParser::EOF); } @@ -6385,13 +6381,13 @@ HogQLParser::ColumnTypeExprContext* HogQLParser::columnTypeExpr() { }); try { size_t alt; - setState(773); + setState(769); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 96, _ctx)) { case 1: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 1); - setState(717); + setState(713); identifier(); break; } @@ -6399,39 +6395,39 @@ HogQLParser::ColumnTypeExprContext* HogQLParser::columnTypeExpr() { case 2: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 2); - setState(718); + setState(714); identifier(); - setState(719); + setState(715); match(HogQLParser::LPAREN); - setState(720); + setState(716); identifier(); - setState(721); + setState(717); columnTypeExpr(); - setState(728); + setState(724); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 89, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(722); + setState(718); match(HogQLParser::COMMA); - setState(723); + setState(719); identifier(); - setState(724); + setState(720); columnTypeExpr(); } - setState(730); + setState(726); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 
89, _ctx); } - setState(732); + setState(728); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COMMA) { - setState(731); + setState(727); match(HogQLParser::COMMA); } - setState(734); + setState(730); match(HogQLParser::RPAREN); break; } @@ -6439,35 +6435,35 @@ HogQLParser::ColumnTypeExprContext* HogQLParser::columnTypeExpr() { case 3: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 3); - setState(736); + setState(732); identifier(); - setState(737); + setState(733); match(HogQLParser::LPAREN); - setState(738); + setState(734); enumValue(); - setState(743); + setState(739); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 91, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(739); + setState(735); match(HogQLParser::COMMA); - setState(740); + setState(736); enumValue(); } - setState(745); + setState(741); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 91, _ctx); } - setState(747); + setState(743); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COMMA) { - setState(746); + setState(742); match(HogQLParser::COMMA); } - setState(749); + setState(745); match(HogQLParser::RPAREN); break; } @@ -6475,35 +6471,35 @@ HogQLParser::ColumnTypeExprContext* HogQLParser::columnTypeExpr() { case 4: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 4); - setState(751); + setState(747); identifier(); - setState(752); + setState(748); match(HogQLParser::LPAREN); - setState(753); + setState(749); columnTypeExpr(); - setState(758); + setState(754); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 93, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(754); + setState(750); match(HogQLParser::COMMA); - setState(755); + setState(751); columnTypeExpr(); } - setState(760); + setState(756); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 93, _ctx); } - setState(762); + setState(758); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COMMA) { - setState(761); + setState(757); match(HogQLParser::COMMA); } - setState(764); + setState(760); match(HogQLParser::RPAREN); break; } @@ -6511,11 +6507,11 @@ HogQLParser::ColumnTypeExprContext* HogQLParser::columnTypeExpr() { case 5: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 5); - setState(766); + setState(762); identifier(); - setState(767); + setState(763); match(HogQLParser::LPAREN); - setState(769); + setState(765); _errHandler->sync(this); _la = _input->LA(1); @@ -6523,10 +6519,10 @@ HogQLParser::ColumnTypeExprContext* HogQLParser::columnTypeExpr() { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(768); + setState(764); columnExprList(); } - setState(771); + setState(767); match(HogQLParser::RPAREN); break; } @@ -6594,28 +6590,28 @@ HogQLParser::ColumnExprListContext* HogQLParser::columnExprList() { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(775); + setState(771); columnExpr(0); - setState(780); + setState(776); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 97, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(776); + setState(772); match(HogQLParser::COMMA); - setState(777); + setState(773); 
columnExpr(0); } - setState(782); + setState(778); _errHandler->sync(this); alt = getInterpreter()->adaptivePredict(_input, 97, _ctx); } - setState(784); + setState(780); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 98, _ctx)) { case 1: { - setState(783); + setState(779); match(HogQLParser::COMMA); break; } @@ -7159,16 +7155,16 @@ tree::TerminalNode* HogQLParser::ColumnExprWinFunctionTargetContext::RPAREN(size return getToken(HogQLParser::RPAREN, i); } -HogQLParser::ColumnExprListContext* HogQLParser::ColumnExprWinFunctionTargetContext::columnExprList() { - return getRuleContext(0); +std::vector HogQLParser::ColumnExprWinFunctionTargetContext::columnExprList() { + return getRuleContexts(); } -tree::TerminalNode* HogQLParser::ColumnExprWinFunctionTargetContext::DISTINCT() { - return getToken(HogQLParser::DISTINCT, 0); +HogQLParser::ColumnExprListContext* HogQLParser::ColumnExprWinFunctionTargetContext::columnExprList(size_t i) { + return getRuleContext(i); } -HogQLParser::ColumnArgListContext* HogQLParser::ColumnExprWinFunctionTargetContext::columnArgList() { - return getRuleContext(0); +tree::TerminalNode* HogQLParser::ColumnExprWinFunctionTargetContext::DISTINCT() { + return getToken(HogQLParser::DISTINCT, 0); } HogQLParser::ColumnExprWinFunctionTargetContext::ColumnExprWinFunctionTargetContext(ColumnExprContext *ctx) { copyFrom(ctx); } @@ -7303,6 +7299,33 @@ std::any HogQLParser::ColumnExprTupleContext::accept(tree::ParseTreeVisitor *vis else return visitor->visitChildren(this); } +//----------------- ColumnExprCallContext ------------------------------------------------------------------ + +HogQLParser::ColumnExprContext* HogQLParser::ColumnExprCallContext::columnExpr() { + return getRuleContext(0); +} + +tree::TerminalNode* HogQLParser::ColumnExprCallContext::LPAREN() { + return getToken(HogQLParser::LPAREN, 0); +} + +tree::TerminalNode* HogQLParser::ColumnExprCallContext::RPAREN() { + return getToken(HogQLParser::RPAREN, 0); +} + +HogQLParser::ColumnExprListContext* HogQLParser::ColumnExprCallContext::columnExprList() { + return getRuleContext(0); +} + +HogQLParser::ColumnExprCallContext::ColumnExprCallContext(ColumnExprContext *ctx) { copyFrom(ctx); } + + +std::any HogQLParser::ColumnExprCallContext::accept(tree::ParseTreeVisitor *visitor) { + if (auto parserVisitor = dynamic_cast(visitor)) + return parserVisitor->visitColumnExprCall(this); + else + return visitor->visitChildren(this); +} //----------------- ColumnExprArrayAccessContext ------------------------------------------------------------------ std::vector HogQLParser::ColumnExprArrayAccessContext::columnExpr() { @@ -7641,16 +7664,16 @@ tree::TerminalNode* HogQLParser::ColumnExprWinFunctionContext::RPAREN(size_t i) return getToken(HogQLParser::RPAREN, i); } -HogQLParser::ColumnExprListContext* HogQLParser::ColumnExprWinFunctionContext::columnExprList() { - return getRuleContext(0); +std::vector HogQLParser::ColumnExprWinFunctionContext::columnExprList() { + return getRuleContexts(); } -tree::TerminalNode* HogQLParser::ColumnExprWinFunctionContext::DISTINCT() { - return getToken(HogQLParser::DISTINCT, 0); +HogQLParser::ColumnExprListContext* HogQLParser::ColumnExprWinFunctionContext::columnExprList(size_t i) { + return getRuleContext(i); } -HogQLParser::ColumnArgListContext* HogQLParser::ColumnExprWinFunctionContext::columnArgList() { - return getRuleContext(0); +tree::TerminalNode* HogQLParser::ColumnExprWinFunctionContext::DISTINCT() { + return getToken(HogQLParser::DISTINCT, 0); } 
HogQLParser::ColumnExprWinFunctionContext::ColumnExprWinFunctionContext(ColumnExprContext *ctx) { copyFrom(ctx); } @@ -7662,6 +7685,21 @@ std::any HogQLParser::ColumnExprWinFunctionContext::accept(tree::ParseTreeVisito else return visitor->visitChildren(this); } +//----------------- ColumnExprLambdaContext ------------------------------------------------------------------ + +HogQLParser::ColumnLambdaExprContext* HogQLParser::ColumnExprLambdaContext::columnLambdaExpr() { + return getRuleContext(0); +} + +HogQLParser::ColumnExprLambdaContext::ColumnExprLambdaContext(ColumnExprContext *ctx) { copyFrom(ctx); } + + +std::any HogQLParser::ColumnExprLambdaContext::accept(tree::ParseTreeVisitor *visitor) { + if (auto parserVisitor = dynamic_cast(visitor)) + return parserVisitor->visitColumnExprLambda(this); + else + return visitor->visitChildren(this); +} //----------------- ColumnExprIdentifierContext ------------------------------------------------------------------ HogQLParser::ColumnIdentifierContext* HogQLParser::ColumnExprIdentifierContext::columnIdentifier() { @@ -7703,12 +7741,12 @@ tree::TerminalNode* HogQLParser::ColumnExprFunctionContext::DISTINCT() { return getToken(HogQLParser::DISTINCT, 0); } -HogQLParser::ColumnArgListContext* HogQLParser::ColumnExprFunctionContext::columnArgList() { - return getRuleContext(0); +std::vector HogQLParser::ColumnExprFunctionContext::columnExprList() { + return getRuleContexts(); } -HogQLParser::ColumnExprListContext* HogQLParser::ColumnExprFunctionContext::columnExprList() { - return getRuleContext(0); +HogQLParser::ColumnExprListContext* HogQLParser::ColumnExprFunctionContext::columnExprList(size_t i) { + return getRuleContext(i); } HogQLParser::ColumnExprFunctionContext::ColumnExprFunctionContext(ColumnExprContext *ctx) { copyFrom(ctx); } @@ -7769,7 +7807,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(935); + setState(932); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 118, _ctx)) { case 1: { @@ -7777,14 +7815,14 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _ctx = _localctx; previousContext = _localctx; - setState(787); + setState(783); match(HogQLParser::CASE); - setState(789); + setState(785); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 99, _ctx)) { case 1: { - setState(788); + setState(784); antlrcpp::downCast(_localctx)->caseExpr = columnExpr(0); break; } @@ -7792,33 +7830,33 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { default: break; } - setState(796); + setState(792); _errHandler->sync(this); _la = _input->LA(1); do { - setState(791); + setState(787); match(HogQLParser::WHEN); - setState(792); + setState(788); antlrcpp::downCast(_localctx)->whenExpr = columnExpr(0); - setState(793); + setState(789); match(HogQLParser::THEN); - setState(794); + setState(790); antlrcpp::downCast(_localctx)->thenExpr = columnExpr(0); - setState(798); + setState(794); _errHandler->sync(this); _la = _input->LA(1); } while (_la == HogQLParser::WHEN); - setState(802); + setState(798); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::ELSE) { - setState(800); + setState(796); match(HogQLParser::ELSE); - setState(801); + setState(797); antlrcpp::downCast(_localctx)->elseExpr = columnExpr(0); } - setState(804); + setState(800); match(HogQLParser::END); break; } @@ -7827,17 +7865,17 @@ HogQLParser::ColumnExprContext* 
HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(806); + setState(802); match(HogQLParser::CAST); - setState(807); + setState(803); match(HogQLParser::LPAREN); - setState(808); + setState(804); columnExpr(0); - setState(809); + setState(805); match(HogQLParser::AS); - setState(810); + setState(806); columnTypeExpr(); - setState(811); + setState(807); match(HogQLParser::RPAREN); break; } @@ -7846,9 +7884,9 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(813); + setState(809); match(HogQLParser::DATE); - setState(814); + setState(810); match(HogQLParser::STRING_LITERAL); break; } @@ -7857,11 +7895,11 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(815); + setState(811); match(HogQLParser::INTERVAL); - setState(816); + setState(812); columnExpr(0); - setState(817); + setState(813); interval(); break; } @@ -7870,27 +7908,27 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(819); + setState(815); match(HogQLParser::SUBSTRING); - setState(820); + setState(816); match(HogQLParser::LPAREN); - setState(821); + setState(817); columnExpr(0); - setState(822); + setState(818); match(HogQLParser::FROM); - setState(823); + setState(819); columnExpr(0); - setState(826); + setState(822); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::FOR) { - setState(824); + setState(820); match(HogQLParser::FOR); - setState(825); + setState(821); columnExpr(0); } - setState(828); + setState(824); match(HogQLParser::RPAREN); break; } @@ -7899,9 +7937,9 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(830); + setState(826); match(HogQLParser::TIMESTAMP); - setState(831); + setState(827); match(HogQLParser::STRING_LITERAL); break; } @@ -7910,11 +7948,11 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(832); + setState(828); match(HogQLParser::TRIM); - setState(833); + setState(829); match(HogQLParser::LPAREN); - setState(834); + setState(830); _la = _input->LA(1); if (!(_la == HogQLParser::BOTH @@ -7925,13 +7963,13 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _errHandler->reportMatch(this); consume(); } - setState(835); + setState(831); string(); - setState(836); + setState(832); match(HogQLParser::FROM); - setState(837); + setState(833); columnExpr(0); - setState(838); + setState(834); match(HogQLParser::RPAREN); break; } @@ -7940,12 +7978,12 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(840); + setState(836); identifier(); - setState(841); + setState(837); match(HogQLParser::LPAREN); - setState(843); + setState(839); _errHandler->sync(this); _la = _input->LA(1); @@ -7953,24 +7991,24 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { ((1ULL << _la) & 
-4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(842); - columnExprList(); + setState(838); + antlrcpp::downCast(_localctx)->columnExprs = columnExprList(); } - setState(845); + setState(841); match(HogQLParser::RPAREN); - setState(855); + setState(851); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::LPAREN) { - setState(847); + setState(843); match(HogQLParser::LPAREN); - setState(849); + setState(845); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 104, _ctx)) { case 1: { - setState(848); + setState(844); match(HogQLParser::DISTINCT); break; } @@ -7978,7 +8016,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { default: break; } - setState(852); + setState(848); _errHandler->sync(this); _la = _input->LA(1); @@ -7986,19 +8024,19 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(851); - columnArgList(); + setState(847); + antlrcpp::downCast(_localctx)->columnArgList = columnExprList(); } - setState(854); + setState(850); match(HogQLParser::RPAREN); } - setState(857); + setState(853); match(HogQLParser::OVER); - setState(858); + setState(854); match(HogQLParser::LPAREN); - setState(859); + setState(855); windowExpr(); - setState(860); + setState(856); match(HogQLParser::RPAREN); break; } @@ -8007,12 +8045,12 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(862); + setState(858); identifier(); - setState(863); + setState(859); match(HogQLParser::LPAREN); - setState(865); + setState(861); _errHandler->sync(this); _la = _input->LA(1); @@ -8020,24 +8058,24 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(864); - columnExprList(); + setState(860); + antlrcpp::downCast(_localctx)->columnExprs = columnExprList(); } - setState(867); + setState(863); match(HogQLParser::RPAREN); - setState(877); + setState(873); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::LPAREN) { - setState(869); + setState(865); match(HogQLParser::LPAREN); - setState(871); + setState(867); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 108, _ctx)) { case 1: { - setState(870); + setState(866); match(HogQLParser::DISTINCT); break; } @@ -8045,7 +8083,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { default: break; } - setState(874); + setState(870); _errHandler->sync(this); _la = _input->LA(1); @@ -8053,15 +8091,15 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(873); - columnArgList(); + setState(869); + antlrcpp::downCast(_localctx)->columnArgList = 
columnExprList(); } - setState(876); + setState(872); match(HogQLParser::RPAREN); } - setState(879); + setState(875); match(HogQLParser::OVER); - setState(880); + setState(876); identifier(); break; } @@ -8070,16 +8108,16 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(882); + setState(878); identifier(); - setState(888); + setState(884); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 112, _ctx)) { case 1: { - setState(883); + setState(879); match(HogQLParser::LPAREN); - setState(885); + setState(881); _errHandler->sync(this); _la = _input->LA(1); @@ -8087,10 +8125,10 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(884); - columnExprList(); + setState(880); + antlrcpp::downCast(_localctx)->columnExprs = columnExprList(); } - setState(887); + setState(883); match(HogQLParser::RPAREN); break; } @@ -8098,14 +8136,14 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { default: break; } - setState(890); + setState(886); match(HogQLParser::LPAREN); - setState(892); + setState(888); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 113, _ctx)) { case 1: { - setState(891); + setState(887); match(HogQLParser::DISTINCT); break; } @@ -8113,7 +8151,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { default: break; } - setState(895); + setState(891); _errHandler->sync(this); _la = _input->LA(1); @@ -8121,10 +8159,10 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(894); - columnArgList(); + setState(890); + antlrcpp::downCast(_localctx)->columnArgList = columnExprList(); } - setState(897); + setState(893); match(HogQLParser::RPAREN); break; } @@ -8133,7 +8171,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(899); + setState(895); hogqlxTagElement(); break; } @@ -8142,7 +8180,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(900); + setState(896); templateString(); break; } @@ -8151,7 +8189,7 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(901); + setState(897); literal(); break; } @@ -8160,10 +8198,10 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(902); + setState(898); match(HogQLParser::DASH); - setState(903); - columnExpr(19); + setState(899); + columnExpr(20); break; } @@ -8171,10 +8209,10 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; 
previousContext = _localctx; - setState(904); + setState(900); match(HogQLParser::NOT); - setState(905); - columnExpr(13); + setState(901); + columnExpr(14); break; } @@ -8182,19 +8220,19 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(909); + setState(905); _errHandler->sync(this); _la = _input->LA(1); if ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & -725088338784043010) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 3229277487103) != 0)) { - setState(906); + setState(902); tableIdentifier(); - setState(907); + setState(903); match(HogQLParser::DOT); } - setState(911); + setState(907); match(HogQLParser::ASTERISK); break; } @@ -8203,11 +8241,11 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(912); + setState(908); match(HogQLParser::LPAREN); - setState(913); + setState(909); selectUnionStmt(); - setState(914); + setState(910); match(HogQLParser::RPAREN); break; } @@ -8216,11 +8254,11 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(916); + setState(912); match(HogQLParser::LPAREN); - setState(917); + setState(913); columnExpr(0); - setState(918); + setState(914); match(HogQLParser::RPAREN); break; } @@ -8229,11 +8267,11 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(920); + setState(916); match(HogQLParser::LPAREN); - setState(921); + setState(917); columnExprList(); - setState(922); + setState(918); match(HogQLParser::RPAREN); break; } @@ -8242,9 +8280,9 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(924); + setState(920); match(HogQLParser::LBRACKET); - setState(926); + setState(922); _errHandler->sync(this); _la = _input->LA(1); @@ -8252,10 +8290,10 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(925); + setState(921); columnExprList(); } - setState(928); + setState(924); match(HogQLParser::RBRACKET); break; } @@ -8264,9 +8302,9 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(929); + setState(925); match(HogQLParser::LBRACE); - setState(931); + setState(927); _errHandler->sync(this); _la = _input->LA(1); @@ -8274,19 +8312,28 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(930); + setState(926); kvPairList(); } - setState(933); + setState(929); match(HogQLParser::RBRACE); break; } case 22: { + _localctx = _tracker.createInstance(_localctx); + _ctx = _localctx; + 
previousContext = _localctx; + setState(930); + columnLambdaExpr(); + break; + } + + case 23: { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(934); + setState(931); columnIdentifier(); break; } @@ -8295,42 +8342,42 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { break; } _ctx->stop = _input->LT(-1); - setState(1041); + setState(1044); _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 129, _ctx); + alt = getInterpreter()->adaptivePredict(_input, 130, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { if (!_parseListeners.empty()) triggerExitRuleEvent(); previousContext = _localctx; - setState(1039); + setState(1042); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 128, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 129, _ctx)) { case 1: { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; newContext->left = previousContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(937); + setState(934); - if (!(precpred(_ctx, 18))) throw FailedPredicateException(this, "precpred(_ctx, 18)"); - setState(941); + if (!(precpred(_ctx, 19))) throw FailedPredicateException(this, "precpred(_ctx, 19)"); + setState(938); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::ASTERISK: { - setState(938); + setState(935); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::ASTERISK); break; } case HogQLParser::SLASH: { - setState(939); + setState(936); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::SLASH); break; } case HogQLParser::PERCENT: { - setState(940); + setState(937); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::PERCENT); break; } @@ -8338,8 +8385,8 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { default: throw NoViableAltException(this); } - setState(943); - antlrcpp::downCast(_localctx)->right = columnExpr(19); + setState(940); + antlrcpp::downCast(_localctx)->right = columnExpr(20); break; } @@ -8348,26 +8395,26 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = newContext; newContext->left = previousContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(944); + setState(941); - if (!(precpred(_ctx, 17))) throw FailedPredicateException(this, "precpred(_ctx, 17)"); - setState(948); + if (!(precpred(_ctx, 18))) throw FailedPredicateException(this, "precpred(_ctx, 18)"); + setState(945); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::PLUS: { - setState(945); + setState(942); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::PLUS); break; } case HogQLParser::DASH: { - setState(946); + setState(943); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::DASH); break; } case HogQLParser::CONCAT: { - setState(947); + setState(944); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::CONCAT); break; } @@ -8375,8 +8422,8 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { default: throw NoViableAltException(this); } - setState(950); - antlrcpp::downCast(_localctx)->right = columnExpr(18); + setState(947); + antlrcpp::downCast(_localctx)->right = columnExpr(19); break; } @@ -8385,71 +8432,71 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { _localctx = newContext; newContext->left 
= previousContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(951); + setState(948); - if (!(precpred(_ctx, 16))) throw FailedPredicateException(this, "precpred(_ctx, 16)"); - setState(976); + if (!(precpred(_ctx, 17))) throw FailedPredicateException(this, "precpred(_ctx, 17)"); + setState(973); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 124, _ctx)) { case 1: { - setState(952); + setState(949); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::EQ_DOUBLE); break; } case 2: { - setState(953); + setState(950); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::EQ_SINGLE); break; } case 3: { - setState(954); + setState(951); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT_EQ); break; } case 4: { - setState(955); + setState(952); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::LT_EQ); break; } case 5: { - setState(956); + setState(953); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::LT); break; } case 6: { - setState(957); + setState(954); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::GT_EQ); break; } case 7: { - setState(958); + setState(955); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::GT); break; } case 8: { - setState(960); + setState(957); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::NOT) { - setState(959); + setState(956); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT); } - setState(962); + setState(959); match(HogQLParser::IN); - setState(964); + setState(961); _errHandler->sync(this); switch (getInterpreter()->adaptivePredict(_input, 122, _ctx)) { case 1: { - setState(963); + setState(960); match(HogQLParser::COHORT); break; } @@ -8461,15 +8508,15 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { } case 9: { - setState(967); + setState(964); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::NOT) { - setState(966); + setState(963); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT); } - setState(969); + setState(966); _la = _input->LA(1); if (!(_la == HogQLParser::ILIKE @@ -8484,37 +8531,37 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { } case 10: { - setState(970); + setState(967); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::REGEX_SINGLE); break; } case 11: { - setState(971); + setState(968); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::REGEX_DOUBLE); break; } case 12: { - setState(972); + setState(969); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT_REGEX); break; } case 13: { - setState(973); + setState(970); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::IREGEX_SINGLE); break; } case 14: { - setState(974); + setState(971); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::IREGEX_DOUBLE); break; } case 15: { - setState(975); + setState(972); antlrcpp::downCast(_localctx)->operator_ = match(HogQLParser::NOT_IREGEX); break; } @@ -8522,8 +8569,8 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { default: break; } - setState(978); - antlrcpp::downCast(_localctx)->right = columnExpr(17); + setState(975); + antlrcpp::downCast(_localctx)->right = columnExpr(18); break; } @@ -8531,13 +8578,13 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; 
pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(979); + setState(976); - if (!(precpred(_ctx, 14))) throw FailedPredicateException(this, "precpred(_ctx, 14)"); - setState(980); + if (!(precpred(_ctx, 15))) throw FailedPredicateException(this, "precpred(_ctx, 15)"); + setState(977); match(HogQLParser::NULLISH); - setState(981); - columnExpr(15); + setState(978); + columnExpr(16); break; } @@ -8545,13 +8592,13 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(982); + setState(979); - if (!(precpred(_ctx, 12))) throw FailedPredicateException(this, "precpred(_ctx, 12)"); - setState(983); + if (!(precpred(_ctx, 13))) throw FailedPredicateException(this, "precpred(_ctx, 13)"); + setState(980); match(HogQLParser::AND); - setState(984); - columnExpr(13); + setState(981); + columnExpr(14); break; } @@ -8559,13 +8606,13 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(985); + setState(982); - if (!(precpred(_ctx, 11))) throw FailedPredicateException(this, "precpred(_ctx, 11)"); - setState(986); + if (!(precpred(_ctx, 12))) throw FailedPredicateException(this, "precpred(_ctx, 12)"); + setState(983); match(HogQLParser::OR); - setState(987); - columnExpr(12); + setState(984); + columnExpr(13); break; } @@ -8573,25 +8620,25 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(988); + setState(985); - if (!(precpred(_ctx, 10))) throw FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(990); + if (!(precpred(_ctx, 11))) throw FailedPredicateException(this, "precpred(_ctx, 11)"); + setState(987); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::NOT) { - setState(989); + setState(986); match(HogQLParser::NOT); } - setState(992); + setState(989); match(HogQLParser::BETWEEN); - setState(993); + setState(990); columnExpr(0); - setState(994); + setState(991); match(HogQLParser::AND); - setState(995); - columnExpr(11); + setState(992); + columnExpr(12); break; } @@ -8599,333 +8646,203 @@ HogQLParser::ColumnExprContext* HogQLParser::columnExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(997); + setState(994); - if (!(precpred(_ctx, 9))) throw FailedPredicateException(this, "precpred(_ctx, 9)"); - setState(998); + if (!(precpred(_ctx, 10))) throw FailedPredicateException(this, "precpred(_ctx, 10)"); + setState(995); match(HogQLParser::QUERY); - setState(999); + setState(996); columnExpr(0); - setState(1000); + setState(997); match(HogQLParser::COLON); - setState(1001); - columnExpr(9); + setState(998); + columnExpr(10); break; } case 9: { - auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); + auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); 
_localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); + setState(1000); + + if (!(precpred(_ctx, 30))) throw FailedPredicateException(this, "precpred(_ctx, 30)"); + setState(1001); + match(HogQLParser::LPAREN); setState(1003); + _errHandler->sync(this); - if (!(precpred(_ctx, 25))) throw FailedPredicateException(this, "precpred(_ctx, 25)"); - setState(1004); - match(HogQLParser::LBRACKET); + _la = _input->LA(1); + if ((((_la & ~ 0x3fULL) == 0) && + ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && + ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && + ((1ULL << (_la - 128)) & 18455) != 0)) { + setState(1002); + columnExprList(); + } setState(1005); - columnExpr(0); - setState(1006); - match(HogQLParser::RBRACKET); + match(HogQLParser::RPAREN); break; } case 10: { - auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); + auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(1008); + setState(1006); - if (!(precpred(_ctx, 24))) throw FailedPredicateException(this, "precpred(_ctx, 24)"); + if (!(precpred(_ctx, 26))) throw FailedPredicateException(this, "precpred(_ctx, 26)"); + setState(1007); + match(HogQLParser::LBRACKET); + setState(1008); + columnExpr(0); setState(1009); - match(HogQLParser::DOT); - setState(1010); - match(HogQLParser::DECIMAL_LITERAL); + match(HogQLParser::RBRACKET); break; } case 11: { - auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); + auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); setState(1011); - if (!(precpred(_ctx, 23))) throw FailedPredicateException(this, "precpred(_ctx, 23)"); + if (!(precpred(_ctx, 25))) throw FailedPredicateException(this, "precpred(_ctx, 25)"); setState(1012); match(HogQLParser::DOT); setState(1013); - identifier(); + match(HogQLParser::DECIMAL_LITERAL); break; } case 12: { - auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); + auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); setState(1014); - if (!(precpred(_ctx, 22))) throw FailedPredicateException(this, "precpred(_ctx, 22)"); + if (!(precpred(_ctx, 24))) throw FailedPredicateException(this, "precpred(_ctx, 24)"); setState(1015); - match(HogQLParser::NULL_PROPERTY); + match(HogQLParser::DOT); setState(1016); - match(HogQLParser::LBRACKET); - setState(1017); - columnExpr(0); - setState(1018); - match(HogQLParser::RBRACKET); + identifier(); break; } case 13: { - auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); + auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(1020); + setState(1017); - if (!(precpred(_ctx, 21))) throw FailedPredicateException(this, "precpred(_ctx, 21)"); - setState(1021); + if (!(precpred(_ctx, 23))) throw FailedPredicateException(this, "precpred(_ctx, 23)"); + setState(1018); match(HogQLParser::NULL_PROPERTY); - setState(1022); - 
match(HogQLParser::DECIMAL_LITERAL); + setState(1019); + match(HogQLParser::LBRACKET); + setState(1020); + columnExpr(0); + setState(1021); + match(HogQLParser::RBRACKET); break; } case 14: { - auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); + auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); setState(1023); - if (!(precpred(_ctx, 20))) throw FailedPredicateException(this, "precpred(_ctx, 20)"); + if (!(precpred(_ctx, 22))) throw FailedPredicateException(this, "precpred(_ctx, 22)"); setState(1024); match(HogQLParser::NULL_PROPERTY); setState(1025); - identifier(); + match(HogQLParser::DECIMAL_LITERAL); break; } case 15: { - auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); + auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); setState(1026); - if (!(precpred(_ctx, 15))) throw FailedPredicateException(this, "precpred(_ctx, 15)"); + if (!(precpred(_ctx, 21))) throw FailedPredicateException(this, "precpred(_ctx, 21)"); setState(1027); - match(HogQLParser::IS); + match(HogQLParser::NULL_PROPERTY); + setState(1028); + identifier(); + break; + } + + case 16: { + auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); + _localctx = newContext; + pushNewRecursionContext(newContext, startState, RuleColumnExpr); setState(1029); + + if (!(precpred(_ctx, 16))) throw FailedPredicateException(this, "precpred(_ctx, 16)"); + setState(1030); + match(HogQLParser::IS); + setState(1032); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::NOT) { - setState(1028); + setState(1031); match(HogQLParser::NOT); } - setState(1031); + setState(1034); match(HogQLParser::NULL_SQL); break; } - case 16: { + case 17: { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleColumnExpr); - setState(1032); + setState(1035); - if (!(precpred(_ctx, 8))) throw FailedPredicateException(this, "precpred(_ctx, 8)"); - setState(1037); + if (!(precpred(_ctx, 9))) throw FailedPredicateException(this, "precpred(_ctx, 9)"); + setState(1040); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 127, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 128, _ctx)) { case 1: { - setState(1033); - match(HogQLParser::AS); - setState(1034); - identifier(); - break; - } - - case 2: { - setState(1035); - match(HogQLParser::AS); setState(1036); - match(HogQLParser::STRING_LITERAL); - break; - } - - default: - break; - } - break; - } - - default: - break; - } - } - setState(1043); - _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 129, _ctx); - } - } - catch (RecognitionException &e) { - _errHandler->reportError(this, e); - _localctx->exception = std::current_exception(); - _errHandler->recover(this, _localctx->exception); - } - return _localctx; -} - -//----------------- ColumnArgListContext ------------------------------------------------------------------ - -HogQLParser::ColumnArgListContext::ColumnArgListContext(ParserRuleContext *parent, size_t invokingState) - : ParserRuleContext(parent, invokingState) { -} - -std::vector 
HogQLParser::ColumnArgListContext::columnArgExpr() { - return getRuleContexts(); -} - -HogQLParser::ColumnArgExprContext* HogQLParser::ColumnArgListContext::columnArgExpr(size_t i) { - return getRuleContext(i); -} - -std::vector HogQLParser::ColumnArgListContext::COMMA() { - return getTokens(HogQLParser::COMMA); -} - -tree::TerminalNode* HogQLParser::ColumnArgListContext::COMMA(size_t i) { - return getToken(HogQLParser::COMMA, i); -} - - -size_t HogQLParser::ColumnArgListContext::getRuleIndex() const { - return HogQLParser::RuleColumnArgList; -} - - -std::any HogQLParser::ColumnArgListContext::accept(tree::ParseTreeVisitor *visitor) { - if (auto parserVisitor = dynamic_cast(visitor)) - return parserVisitor->visitColumnArgList(this); - else - return visitor->visitChildren(this); -} - -HogQLParser::ColumnArgListContext* HogQLParser::columnArgList() { - ColumnArgListContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 118, HogQLParser::RuleColumnArgList); - size_t _la = 0; - -#if __cplusplus > 201703L - auto onExit = finally([=, this] { -#else - auto onExit = finally([=] { -#endif - exitRule(); - }); - try { - size_t alt; - enterOuterAlt(_localctx, 1); - setState(1044); - columnArgExpr(); - setState(1049); - _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 130, _ctx); - while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { - if (alt == 1) { - setState(1045); - match(HogQLParser::COMMA); - setState(1046); - columnArgExpr(); - } - setState(1051); - _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 130, _ctx); - } - setState(1053); - _errHandler->sync(this); - - _la = _input->LA(1); - if (_la == HogQLParser::COMMA) { - setState(1052); - match(HogQLParser::COMMA); - } - - } - catch (RecognitionException &e) { - _errHandler->reportError(this, e); - _localctx->exception = std::current_exception(); - _errHandler->recover(this, _localctx->exception); - } - - return _localctx; -} - -//----------------- ColumnArgExprContext ------------------------------------------------------------------ - -HogQLParser::ColumnArgExprContext::ColumnArgExprContext(ParserRuleContext *parent, size_t invokingState) - : ParserRuleContext(parent, invokingState) { -} - -HogQLParser::ColumnLambdaExprContext* HogQLParser::ColumnArgExprContext::columnLambdaExpr() { - return getRuleContext(0); -} - -HogQLParser::ColumnExprContext* HogQLParser::ColumnArgExprContext::columnExpr() { - return getRuleContext(0); -} - - -size_t HogQLParser::ColumnArgExprContext::getRuleIndex() const { - return HogQLParser::RuleColumnArgExpr; -} - - -std::any HogQLParser::ColumnArgExprContext::accept(tree::ParseTreeVisitor *visitor) { - if (auto parserVisitor = dynamic_cast(visitor)) - return parserVisitor->visitColumnArgExpr(this); - else - return visitor->visitChildren(this); -} - -HogQLParser::ColumnArgExprContext* HogQLParser::columnArgExpr() { - ColumnArgExprContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 120, HogQLParser::RuleColumnArgExpr); - -#if __cplusplus > 201703L - auto onExit = finally([=, this] { -#else - auto onExit = finally([=] { -#endif - exitRule(); - }); - try { - setState(1057); - _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 132, _ctx)) { - case 1: { - enterOuterAlt(_localctx, 1); - setState(1055); - columnLambdaExpr(); - break; - } + match(HogQLParser::AS); + setState(1037); + identifier(); + break; + } - case 2: { - enterOuterAlt(_localctx, 2); - setState(1056); - 
columnExpr(0); - break; - } + case 2: { + setState(1038); + match(HogQLParser::AS); + setState(1039); + match(HogQLParser::STRING_LITERAL); + break; + } - default: - break; + default: + break; + } + break; + } + + default: + break; + } + } + setState(1046); + _errHandler->sync(this); + alt = getInterpreter()->adaptivePredict(_input, 130, _ctx); } - } catch (RecognitionException &e) { _errHandler->reportError(this, e); _localctx->exception = std::current_exception(); _errHandler->recover(this, _localctx->exception); } - return _localctx; } @@ -8939,10 +8856,6 @@ tree::TerminalNode* HogQLParser::ColumnLambdaExprContext::ARROW() { return getToken(HogQLParser::ARROW, 0); } -HogQLParser::ColumnExprContext* HogQLParser::ColumnLambdaExprContext::columnExpr() { - return getRuleContext(0); -} - tree::TerminalNode* HogQLParser::ColumnLambdaExprContext::LPAREN() { return getToken(HogQLParser::LPAREN, 0); } @@ -8959,6 +8872,14 @@ tree::TerminalNode* HogQLParser::ColumnLambdaExprContext::RPAREN() { return getToken(HogQLParser::RPAREN, 0); } +HogQLParser::ColumnExprContext* HogQLParser::ColumnLambdaExprContext::columnExpr() { + return getRuleContext(0); +} + +HogQLParser::BlockContext* HogQLParser::ColumnLambdaExprContext::block() { + return getRuleContext(0); +} + std::vector HogQLParser::ColumnLambdaExprContext::COMMA() { return getTokens(HogQLParser::COMMA); } @@ -8982,7 +8903,7 @@ std::any HogQLParser::ColumnLambdaExprContext::accept(tree::ParseTreeVisitor *vi HogQLParser::ColumnLambdaExprContext* HogQLParser::columnLambdaExpr() { ColumnLambdaExprContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 122, HogQLParser::RuleColumnLambdaExpr); + enterRule(_localctx, 118, HogQLParser::RuleColumnLambdaExpr); size_t _la = 0; #if __cplusplus > 201703L @@ -8995,169 +8916,100 @@ HogQLParser::ColumnLambdaExprContext* HogQLParser::columnLambdaExpr() { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(1084); + setState(1074); _errHandler->sync(this); - switch (_input->LA(1)) { - case HogQLParser::LPAREN: { - setState(1059); - match(HogQLParser::LPAREN); - setState(1060); - identifier(); - setState(1065); - _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 133, _ctx); - while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { - if (alt == 1) { - setState(1061); - match(HogQLParser::COMMA); - setState(1062); - identifier(); - } - setState(1067); - _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 133, _ctx); + switch (getInterpreter()->adaptivePredict(_input, 135, _ctx)) { + case 1: { + setState(1047); + match(HogQLParser::LPAREN); + setState(1048); + identifier(); + setState(1053); + _errHandler->sync(this); + alt = getInterpreter()->adaptivePredict(_input, 131, _ctx); + while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { + if (alt == 1) { + setState(1049); + match(HogQLParser::COMMA); + setState(1050); + identifier(); } - setState(1069); + setState(1055); _errHandler->sync(this); + alt = getInterpreter()->adaptivePredict(_input, 131, _ctx); + } + setState(1057); + _errHandler->sync(this); - _la = _input->LA(1); - if (_la == HogQLParser::COMMA) { - setState(1068); - match(HogQLParser::COMMA); - } - setState(1071); - match(HogQLParser::RPAREN); - break; + _la = _input->LA(1); + if (_la == HogQLParser::COMMA) { + setState(1056); + match(HogQLParser::COMMA); } + setState(1059); + match(HogQLParser::RPAREN); + break; + } - case HogQLParser::ALL: - case HogQLParser::AND: - case HogQLParser::ANTI: - case HogQLParser::ANY: 
- case HogQLParser::ARRAY: - case HogQLParser::AS: - case HogQLParser::ASCENDING: - case HogQLParser::ASOF: - case HogQLParser::BETWEEN: - case HogQLParser::BOTH: - case HogQLParser::BY: - case HogQLParser::CASE: - case HogQLParser::CAST: - case HogQLParser::COHORT: - case HogQLParser::COLLATE: - case HogQLParser::CROSS: - case HogQLParser::CUBE: - case HogQLParser::CURRENT: - case HogQLParser::DATE: - case HogQLParser::DAY: - case HogQLParser::DESC: - case HogQLParser::DESCENDING: - case HogQLParser::DISTINCT: - case HogQLParser::ELSE: - case HogQLParser::END: - case HogQLParser::EXTRACT: - case HogQLParser::FINAL: - case HogQLParser::FIRST: - case HogQLParser::FOLLOWING: - case HogQLParser::FOR: - case HogQLParser::FROM: - case HogQLParser::FULL: - case HogQLParser::GROUP: - case HogQLParser::HAVING: - case HogQLParser::HOUR: - case HogQLParser::ID: - case HogQLParser::IF: - case HogQLParser::ILIKE: - case HogQLParser::IN: - case HogQLParser::INNER: - case HogQLParser::INTERVAL: - case HogQLParser::IS: - case HogQLParser::JOIN: - case HogQLParser::KEY: - case HogQLParser::LAST: - case HogQLParser::LEADING: - case HogQLParser::LEFT: - case HogQLParser::LIKE: - case HogQLParser::LIMIT: - case HogQLParser::MINUTE: - case HogQLParser::MONTH: - case HogQLParser::NOT: - case HogQLParser::NULLS: - case HogQLParser::OFFSET: - case HogQLParser::ON: - case HogQLParser::OR: - case HogQLParser::ORDER: - case HogQLParser::OUTER: - case HogQLParser::OVER: - case HogQLParser::PARTITION: - case HogQLParser::PRECEDING: - case HogQLParser::PREWHERE: - case HogQLParser::QUARTER: - case HogQLParser::RANGE: - case HogQLParser::RETURN: - case HogQLParser::RIGHT: - case HogQLParser::ROLLUP: - case HogQLParser::ROW: - case HogQLParser::ROWS: - case HogQLParser::SAMPLE: - case HogQLParser::SECOND: - case HogQLParser::SELECT: - case HogQLParser::SEMI: - case HogQLParser::SETTINGS: - case HogQLParser::SUBSTRING: - case HogQLParser::THEN: - case HogQLParser::TIES: - case HogQLParser::TIMESTAMP: - case HogQLParser::TO: - case HogQLParser::TOP: - case HogQLParser::TOTALS: - case HogQLParser::TRAILING: - case HogQLParser::TRIM: - case HogQLParser::TRUNCATE: - case HogQLParser::UNBOUNDED: - case HogQLParser::UNION: - case HogQLParser::USING: - case HogQLParser::WEEK: - case HogQLParser::WHEN: - case HogQLParser::WHERE: - case HogQLParser::WINDOW: - case HogQLParser::WITH: - case HogQLParser::YEAR: - case HogQLParser::IDENTIFIER: { - setState(1073); - identifier(); - setState(1078); - _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 135, _ctx); - while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { - if (alt == 1) { - setState(1074); - match(HogQLParser::COMMA); - setState(1075); - identifier(); - } - setState(1080); - _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 135, _ctx); + case 2: { + setState(1061); + identifier(); + setState(1066); + _errHandler->sync(this); + alt = getInterpreter()->adaptivePredict(_input, 133, _ctx); + while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { + if (alt == 1) { + setState(1062); + match(HogQLParser::COMMA); + setState(1063); + identifier(); } - setState(1082); + setState(1068); _errHandler->sync(this); + alt = getInterpreter()->adaptivePredict(_input, 133, _ctx); + } + setState(1070); + _errHandler->sync(this); - _la = _input->LA(1); - if (_la == HogQLParser::COMMA) { - setState(1081); - match(HogQLParser::COMMA); - } - break; + _la = _input->LA(1); + if (_la == HogQLParser::COMMA) { + setState(1069); + 
match(HogQLParser::COMMA); } + break; + } + + case 3: { + setState(1072); + match(HogQLParser::LPAREN); + setState(1073); + match(HogQLParser::RPAREN); + break; + } default: - throw NoViableAltException(this); + break; } - setState(1086); + setState(1076); match(HogQLParser::ARROW); - setState(1087); - columnExpr(0); + setState(1079); + _errHandler->sync(this); + switch (getInterpreter()->adaptivePredict(_input, 136, _ctx)) { + case 1: { + setState(1077); + columnExpr(0); + break; + } + + case 2: { + setState(1078); + block(); + break; + } + + default: + break; + } } catch (RecognitionException &e) { @@ -9284,7 +9136,7 @@ std::any HogQLParser::HogqlxTagElementNestedContext::accept(tree::ParseTreeVisit } HogQLParser::HogqlxTagElementContext* HogQLParser::hogqlxTagElement() { HogqlxTagElementContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 124, HogQLParser::RuleHogqlxTagElement); + enterRule(_localctx, 120, HogQLParser::RuleHogqlxTagElement); size_t _la = 0; #if __cplusplus > 201703L @@ -9295,31 +9147,31 @@ HogQLParser::HogqlxTagElementContext* HogQLParser::hogqlxTagElement() { exitRule(); }); try { - setState(1121); + setState(1113); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 141, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 140, _ctx)) { case 1: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 1); - setState(1089); + setState(1081); match(HogQLParser::LT); - setState(1090); + setState(1082); identifier(); - setState(1094); + setState(1086); _errHandler->sync(this); _la = _input->LA(1); while ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & -725088338784043010) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 3229277487103) != 0)) { - setState(1091); + setState(1083); hogqlxTagAttribute(); - setState(1096); + setState(1088); _errHandler->sync(this); _la = _input->LA(1); } - setState(1097); + setState(1089); match(HogQLParser::SLASH); - setState(1098); + setState(1090); match(HogQLParser::GT); break; } @@ -9327,40 +9179,40 @@ HogQLParser::HogqlxTagElementContext* HogQLParser::hogqlxTagElement() { case 2: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 2); - setState(1100); + setState(1092); match(HogQLParser::LT); - setState(1101); + setState(1093); identifier(); - setState(1105); + setState(1097); _errHandler->sync(this); _la = _input->LA(1); while ((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & -725088338784043010) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 3229277487103) != 0)) { - setState(1102); + setState(1094); hogqlxTagAttribute(); - setState(1107); + setState(1099); _errHandler->sync(this); _la = _input->LA(1); } - setState(1108); + setState(1100); match(HogQLParser::GT); - setState(1114); + setState(1106); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 140, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 139, _ctx)) { case 1: { - setState(1109); + setState(1101); hogqlxTagElement(); break; } case 2: { - setState(1110); + setState(1102); match(HogQLParser::LBRACE); - setState(1111); + setState(1103); columnExpr(0); - setState(1112); + setState(1104); match(HogQLParser::RBRACE); break; } @@ -9368,13 +9220,13 @@ HogQLParser::HogqlxTagElementContext* HogQLParser::hogqlxTagElement() { default: break; } - setState(1116); + setState(1108); match(HogQLParser::LT); - setState(1117); + setState(1109); match(HogQLParser::SLASH); - setState(1118); + setState(1110); 
identifier(); - setState(1119); + setState(1111); match(HogQLParser::GT); break; } @@ -9438,7 +9290,7 @@ std::any HogQLParser::HogqlxTagAttributeContext::accept(tree::ParseTreeVisitor * HogQLParser::HogqlxTagAttributeContext* HogQLParser::hogqlxTagAttribute() { HogqlxTagAttributeContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 126, HogQLParser::RuleHogqlxTagAttribute); + enterRule(_localctx, 122, HogQLParser::RuleHogqlxTagAttribute); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -9448,38 +9300,38 @@ HogQLParser::HogqlxTagAttributeContext* HogQLParser::hogqlxTagAttribute() { exitRule(); }); try { - setState(1134); + setState(1126); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 142, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 141, _ctx)) { case 1: { enterOuterAlt(_localctx, 1); - setState(1123); + setState(1115); identifier(); - setState(1124); + setState(1116); match(HogQLParser::EQ_SINGLE); - setState(1125); + setState(1117); string(); break; } case 2: { enterOuterAlt(_localctx, 2); - setState(1127); + setState(1119); identifier(); - setState(1128); + setState(1120); match(HogQLParser::EQ_SINGLE); - setState(1129); + setState(1121); match(HogQLParser::LBRACE); - setState(1130); + setState(1122); columnExpr(0); - setState(1131); + setState(1123); match(HogQLParser::RBRACE); break; } case 3: { enterOuterAlt(_localctx, 3); - setState(1133); + setState(1125); identifier(); break; } @@ -9535,7 +9387,7 @@ std::any HogQLParser::WithExprListContext::accept(tree::ParseTreeVisitor *visito HogQLParser::WithExprListContext* HogQLParser::withExprList() { WithExprListContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 128, HogQLParser::RuleWithExprList); + enterRule(_localctx, 124, HogQLParser::RuleWithExprList); size_t _la = 0; #if __cplusplus > 201703L @@ -9548,28 +9400,28 @@ HogQLParser::WithExprListContext* HogQLParser::withExprList() { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(1136); + setState(1128); withExpr(); - setState(1141); + setState(1133); _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 143, _ctx); + alt = getInterpreter()->adaptivePredict(_input, 142, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(1137); + setState(1129); match(HogQLParser::COMMA); - setState(1138); + setState(1130); withExpr(); } - setState(1143); + setState(1135); _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 143, _ctx); + alt = getInterpreter()->adaptivePredict(_input, 142, _ctx); } - setState(1145); + setState(1137); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COMMA) { - setState(1144); + setState(1136); match(HogQLParser::COMMA); } @@ -9654,7 +9506,7 @@ std::any HogQLParser::WithExprSubqueryContext::accept(tree::ParseTreeVisitor *vi } HogQLParser::WithExprContext* HogQLParser::withExpr() { WithExprContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 130, HogQLParser::RuleWithExpr); + enterRule(_localctx, 126, HogQLParser::RuleWithExpr); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -9664,21 +9516,21 @@ HogQLParser::WithExprContext* HogQLParser::withExpr() { exitRule(); }); try { - setState(1157); + setState(1149); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 145, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 144, _ctx)) { case 1: { _localctx = 
_tracker.createInstance(_localctx); enterOuterAlt(_localctx, 1); - setState(1147); + setState(1139); identifier(); - setState(1148); + setState(1140); match(HogQLParser::AS); - setState(1149); + setState(1141); match(HogQLParser::LPAREN); - setState(1150); + setState(1142); selectUnionStmt(); - setState(1151); + setState(1143); match(HogQLParser::RPAREN); break; } @@ -9686,11 +9538,11 @@ HogQLParser::WithExprContext* HogQLParser::withExpr() { case 2: { _localctx = _tracker.createInstance(_localctx); enterOuterAlt(_localctx, 2); - setState(1153); + setState(1145); columnExpr(0); - setState(1154); + setState(1146); match(HogQLParser::AS); - setState(1155); + setState(1147); identifier(); break; } @@ -9746,7 +9598,7 @@ std::any HogQLParser::ColumnIdentifierContext::accept(tree::ParseTreeVisitor *vi HogQLParser::ColumnIdentifierContext* HogQLParser::columnIdentifier() { ColumnIdentifierContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 132, HogQLParser::RuleColumnIdentifier); + enterRule(_localctx, 128, HogQLParser::RuleColumnIdentifier); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -9756,12 +9608,12 @@ HogQLParser::ColumnIdentifierContext* HogQLParser::columnIdentifier() { exitRule(); }); try { - setState(1166); + setState(1158); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::LBRACE: { enterOuterAlt(_localctx, 1); - setState(1159); + setState(1151); placeholder(); break; } @@ -9861,14 +9713,14 @@ HogQLParser::ColumnIdentifierContext* HogQLParser::columnIdentifier() { case HogQLParser::YEAR: case HogQLParser::IDENTIFIER: { enterOuterAlt(_localctx, 2); - setState(1163); + setState(1155); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 146, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 145, _ctx)) { case 1: { - setState(1160); + setState(1152); tableIdentifier(); - setState(1161); + setState(1153); match(HogQLParser::DOT); break; } @@ -9876,7 +9728,7 @@ HogQLParser::ColumnIdentifierContext* HogQLParser::columnIdentifier() { default: break; } - setState(1165); + setState(1157); nestedIdentifier(); break; } @@ -9932,7 +9784,7 @@ std::any HogQLParser::NestedIdentifierContext::accept(tree::ParseTreeVisitor *vi HogQLParser::NestedIdentifierContext* HogQLParser::nestedIdentifier() { NestedIdentifierContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 134, HogQLParser::RuleNestedIdentifier); + enterRule(_localctx, 130, HogQLParser::RuleNestedIdentifier); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -9944,21 +9796,21 @@ HogQLParser::NestedIdentifierContext* HogQLParser::nestedIdentifier() { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(1168); + setState(1160); identifier(); - setState(1173); + setState(1165); _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 148, _ctx); + alt = getInterpreter()->adaptivePredict(_input, 147, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(1169); + setState(1161); match(HogQLParser::DOT); - setState(1170); + setState(1162); identifier(); } - setState(1175); + setState(1167); _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 148, _ctx); + alt = getInterpreter()->adaptivePredict(_input, 147, _ctx); } } @@ -10107,8 +9959,8 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { HogQLParser::TableExprContext *_localctx = _tracker.createInstance(_ctx, parentState); 
HogQLParser::TableExprContext *previousContext = _localctx; (void)previousContext; // Silence compiler, in case the context is not used by generated code. - size_t startState = 136; - enterRecursionRule(_localctx, 136, HogQLParser::RuleTableExpr, precedence); + size_t startState = 132; + enterRecursionRule(_localctx, 132, HogQLParser::RuleTableExpr, precedence); @@ -10122,15 +9974,15 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(1185); + setState(1177); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 149, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 148, _ctx)) { case 1: { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(1177); + setState(1169); tableIdentifier(); break; } @@ -10139,7 +9991,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(1178); + setState(1170); tableFunctionExpr(); break; } @@ -10148,11 +10000,11 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(1179); + setState(1171); match(HogQLParser::LPAREN); - setState(1180); + setState(1172); selectUnionStmt(); - setState(1181); + setState(1173); match(HogQLParser::RPAREN); break; } @@ -10161,7 +10013,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(1183); + setState(1175); hogqlxTagElement(); break; } @@ -10170,7 +10022,7 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { _localctx = _tracker.createInstance(_localctx); _ctx = _localctx; previousContext = _localctx; - setState(1184); + setState(1176); placeholder(); break; } @@ -10179,9 +10031,9 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { break; } _ctx->stop = _input->LT(-1); - setState(1195); + setState(1187); _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 151, _ctx); + alt = getInterpreter()->adaptivePredict(_input, 150, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { if (!_parseListeners.empty()) @@ -10190,10 +10042,10 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { auto newContext = _tracker.createInstance(_tracker.createInstance(parentContext, parentState)); _localctx = newContext; pushNewRecursionContext(newContext, startState, RuleTableExpr); - setState(1187); + setState(1179); if (!(precpred(_ctx, 3))) throw FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(1191); + setState(1183); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::DATE: @@ -10201,15 +10053,15 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { case HogQLParser::ID: case HogQLParser::KEY: case HogQLParser::IDENTIFIER: { - setState(1188); + setState(1180); alias(); break; } case HogQLParser::AS: { - setState(1189); + setState(1181); match(HogQLParser::AS); - setState(1190); + setState(1182); identifier(); break; } @@ -10218,9 +10070,9 @@ HogQLParser::TableExprContext* HogQLParser::tableExpr(int precedence) { throw NoViableAltException(this); } } - setState(1197); + setState(1189); _errHandler->sync(this); - alt = 
getInterpreter()->adaptivePredict(_input, 151, _ctx); + alt = getInterpreter()->adaptivePredict(_input, 150, _ctx); } } catch (RecognitionException &e) { @@ -10268,7 +10120,7 @@ std::any HogQLParser::TableFunctionExprContext::accept(tree::ParseTreeVisitor *v HogQLParser::TableFunctionExprContext* HogQLParser::tableFunctionExpr() { TableFunctionExprContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 138, HogQLParser::RuleTableFunctionExpr); + enterRule(_localctx, 134, HogQLParser::RuleTableFunctionExpr); size_t _la = 0; #if __cplusplus > 201703L @@ -10280,11 +10132,11 @@ HogQLParser::TableFunctionExprContext* HogQLParser::tableFunctionExpr() { }); try { enterOuterAlt(_localctx, 1); - setState(1198); + setState(1190); identifier(); - setState(1199); + setState(1191); match(HogQLParser::LPAREN); - setState(1201); + setState(1193); _errHandler->sync(this); _la = _input->LA(1); @@ -10292,10 +10144,10 @@ HogQLParser::TableFunctionExprContext* HogQLParser::tableFunctionExpr() { ((1ULL << _la) & -4503602311741442) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 64)) & 90493036243451903) != 0) || ((((_la - 128) & ~ 0x3fULL) == 0) && ((1ULL << (_la - 128)) & 18455) != 0)) { - setState(1200); + setState(1192); tableArgList(); } - setState(1203); + setState(1195); match(HogQLParser::RPAREN); } @@ -10341,7 +10193,7 @@ std::any HogQLParser::TableIdentifierContext::accept(tree::ParseTreeVisitor *vis HogQLParser::TableIdentifierContext* HogQLParser::tableIdentifier() { TableIdentifierContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 140, HogQLParser::RuleTableIdentifier); + enterRule(_localctx, 136, HogQLParser::RuleTableIdentifier); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -10352,14 +10204,14 @@ HogQLParser::TableIdentifierContext* HogQLParser::tableIdentifier() { }); try { enterOuterAlt(_localctx, 1); - setState(1208); + setState(1200); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 153, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 152, _ctx)) { case 1: { - setState(1205); + setState(1197); databaseIdentifier(); - setState(1206); + setState(1198); match(HogQLParser::DOT); break; } @@ -10367,7 +10219,7 @@ HogQLParser::TableIdentifierContext* HogQLParser::tableIdentifier() { default: break; } - setState(1210); + setState(1202); identifier(); } @@ -10417,7 +10269,7 @@ std::any HogQLParser::TableArgListContext::accept(tree::ParseTreeVisitor *visito HogQLParser::TableArgListContext* HogQLParser::tableArgList() { TableArgListContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 142, HogQLParser::RuleTableArgList); + enterRule(_localctx, 138, HogQLParser::RuleTableArgList); size_t _la = 0; #if __cplusplus > 201703L @@ -10430,28 +10282,28 @@ HogQLParser::TableArgListContext* HogQLParser::tableArgList() { try { size_t alt; enterOuterAlt(_localctx, 1); - setState(1212); + setState(1204); columnExpr(0); - setState(1217); + setState(1209); _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 154, _ctx); + alt = getInterpreter()->adaptivePredict(_input, 153, _ctx); while (alt != 2 && alt != atn::ATN::INVALID_ALT_NUMBER) { if (alt == 1) { - setState(1213); + setState(1205); match(HogQLParser::COMMA); - setState(1214); + setState(1206); columnExpr(0); } - setState(1219); + setState(1211); _errHandler->sync(this); - alt = getInterpreter()->adaptivePredict(_input, 154, _ctx); + alt = 
getInterpreter()->adaptivePredict(_input, 153, _ctx); } - setState(1221); + setState(1213); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::COMMA) { - setState(1220); + setState(1212); match(HogQLParser::COMMA); } @@ -10490,7 +10342,7 @@ std::any HogQLParser::DatabaseIdentifierContext::accept(tree::ParseTreeVisitor * HogQLParser::DatabaseIdentifierContext* HogQLParser::databaseIdentifier() { DatabaseIdentifierContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 144, HogQLParser::RuleDatabaseIdentifier); + enterRule(_localctx, 140, HogQLParser::RuleDatabaseIdentifier); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -10501,7 +10353,7 @@ HogQLParser::DatabaseIdentifierContext* HogQLParser::databaseIdentifier() { }); try { enterOuterAlt(_localctx, 1); - setState(1223); + setState(1215); identifier(); } @@ -10555,7 +10407,7 @@ std::any HogQLParser::FloatingLiteralContext::accept(tree::ParseTreeVisitor *vis HogQLParser::FloatingLiteralContext* HogQLParser::floatingLiteral() { FloatingLiteralContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 146, HogQLParser::RuleFloatingLiteral); + enterRule(_localctx, 142, HogQLParser::RuleFloatingLiteral); size_t _la = 0; #if __cplusplus > 201703L @@ -10566,21 +10418,21 @@ HogQLParser::FloatingLiteralContext* HogQLParser::floatingLiteral() { exitRule(); }); try { - setState(1233); + setState(1225); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::FLOATING_LITERAL: { enterOuterAlt(_localctx, 1); - setState(1225); + setState(1217); match(HogQLParser::FLOATING_LITERAL); break; } case HogQLParser::DOT: { enterOuterAlt(_localctx, 2); - setState(1226); + setState(1218); match(HogQLParser::DOT); - setState(1227); + setState(1219); _la = _input->LA(1); if (!(_la == HogQLParser::OCTAL_LITERAL @@ -10596,16 +10448,16 @@ HogQLParser::FloatingLiteralContext* HogQLParser::floatingLiteral() { case HogQLParser::DECIMAL_LITERAL: { enterOuterAlt(_localctx, 3); - setState(1228); + setState(1220); match(HogQLParser::DECIMAL_LITERAL); - setState(1229); + setState(1221); match(HogQLParser::DOT); - setState(1231); + setState(1223); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 156, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 155, _ctx)) { case 1: { - setState(1230); + setState(1222); _la = _input->LA(1); if (!(_la == HogQLParser::OCTAL_LITERAL @@ -10692,7 +10544,7 @@ std::any HogQLParser::NumberLiteralContext::accept(tree::ParseTreeVisitor *visit HogQLParser::NumberLiteralContext* HogQLParser::numberLiteral() { NumberLiteralContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 148, HogQLParser::RuleNumberLiteral); + enterRule(_localctx, 144, HogQLParser::RuleNumberLiteral); size_t _la = 0; #if __cplusplus > 201703L @@ -10704,14 +10556,14 @@ HogQLParser::NumberLiteralContext* HogQLParser::numberLiteral() { }); try { enterOuterAlt(_localctx, 1); - setState(1236); + setState(1228); _errHandler->sync(this); _la = _input->LA(1); if (_la == HogQLParser::DASH || _la == HogQLParser::PLUS) { - setState(1235); + setState(1227); _la = _input->LA(1); if (!(_la == HogQLParser::DASH @@ -10723,41 +10575,41 @@ HogQLParser::NumberLiteralContext* HogQLParser::numberLiteral() { consume(); } } - setState(1244); + setState(1236); _errHandler->sync(this); - switch (getInterpreter()->adaptivePredict(_input, 159, _ctx)) { + switch (getInterpreter()->adaptivePredict(_input, 158, _ctx)) { case 1: { - 
setState(1238); + setState(1230); floatingLiteral(); break; } case 2: { - setState(1239); + setState(1231); match(HogQLParser::OCTAL_LITERAL); break; } case 3: { - setState(1240); + setState(1232); match(HogQLParser::DECIMAL_LITERAL); break; } case 4: { - setState(1241); + setState(1233); match(HogQLParser::HEXADECIMAL_LITERAL); break; } case 5: { - setState(1242); + setState(1234); match(HogQLParser::INF); break; } case 6: { - setState(1243); + setState(1235); match(HogQLParser::NAN_SQL); break; } @@ -10809,7 +10661,7 @@ std::any HogQLParser::LiteralContext::accept(tree::ParseTreeVisitor *visitor) { HogQLParser::LiteralContext* HogQLParser::literal() { LiteralContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 150, HogQLParser::RuleLiteral); + enterRule(_localctx, 146, HogQLParser::RuleLiteral); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -10819,7 +10671,7 @@ HogQLParser::LiteralContext* HogQLParser::literal() { exitRule(); }); try { - setState(1249); + setState(1241); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::INF: @@ -10832,21 +10684,21 @@ HogQLParser::LiteralContext* HogQLParser::literal() { case HogQLParser::DOT: case HogQLParser::PLUS: { enterOuterAlt(_localctx, 1); - setState(1246); + setState(1238); numberLiteral(); break; } case HogQLParser::STRING_LITERAL: { enterOuterAlt(_localctx, 2); - setState(1247); + setState(1239); match(HogQLParser::STRING_LITERAL); break; } case HogQLParser::NULL_SQL: { enterOuterAlt(_localctx, 3); - setState(1248); + setState(1240); match(HogQLParser::NULL_SQL); break; } @@ -10918,7 +10770,7 @@ std::any HogQLParser::IntervalContext::accept(tree::ParseTreeVisitor *visitor) { HogQLParser::IntervalContext* HogQLParser::interval() { IntervalContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 152, HogQLParser::RuleInterval); + enterRule(_localctx, 148, HogQLParser::RuleInterval); size_t _la = 0; #if __cplusplus > 201703L @@ -10930,7 +10782,7 @@ HogQLParser::IntervalContext* HogQLParser::interval() { }); try { enterOuterAlt(_localctx, 1); - setState(1251); + setState(1243); _la = _input->LA(1); if (!((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 108086665936896000) != 0) || ((((_la - 70) & ~ 0x3fULL) == 0) && @@ -11313,7 +11165,7 @@ std::any HogQLParser::KeywordContext::accept(tree::ParseTreeVisitor *visitor) { HogQLParser::KeywordContext* HogQLParser::keyword() { KeywordContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 154, HogQLParser::RuleKeyword); + enterRule(_localctx, 150, HogQLParser::RuleKeyword); size_t _la = 0; #if __cplusplus > 201703L @@ -11325,7 +11177,7 @@ HogQLParser::KeywordContext* HogQLParser::keyword() { }); try { enterOuterAlt(_localctx, 1); - setState(1253); + setState(1245); _la = _input->LA(1); if (!((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & -833175004720939010) != 0) || ((((_la - 64) & ~ 0x3fULL) == 0) && @@ -11384,7 +11236,7 @@ std::any HogQLParser::KeywordForAliasContext::accept(tree::ParseTreeVisitor *vis HogQLParser::KeywordForAliasContext* HogQLParser::keywordForAlias() { KeywordForAliasContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 156, HogQLParser::RuleKeywordForAlias); + enterRule(_localctx, 152, HogQLParser::RuleKeywordForAlias); size_t _la = 0; #if __cplusplus > 201703L @@ -11396,7 +11248,7 @@ HogQLParser::KeywordForAliasContext* HogQLParser::keywordForAlias() { }); try { enterOuterAlt(_localctx, 1); - setState(1255); + 
setState(1247); _la = _input->LA(1); if (!((((_la & ~ 0x3fULL) == 0) && ((1ULL << _la) & 282025807314944) != 0))) { @@ -11446,7 +11298,7 @@ std::any HogQLParser::AliasContext::accept(tree::ParseTreeVisitor *visitor) { HogQLParser::AliasContext* HogQLParser::alias() { AliasContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 158, HogQLParser::RuleAlias); + enterRule(_localctx, 154, HogQLParser::RuleAlias); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -11456,12 +11308,12 @@ HogQLParser::AliasContext* HogQLParser::alias() { exitRule(); }); try { - setState(1259); + setState(1251); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::IDENTIFIER: { enterOuterAlt(_localctx, 1); - setState(1257); + setState(1249); match(HogQLParser::IDENTIFIER); break; } @@ -11471,7 +11323,7 @@ HogQLParser::AliasContext* HogQLParser::alias() { case HogQLParser::ID: case HogQLParser::KEY: { enterOuterAlt(_localctx, 2); - setState(1258); + setState(1250); keywordForAlias(); break; } @@ -11523,7 +11375,7 @@ std::any HogQLParser::IdentifierContext::accept(tree::ParseTreeVisitor *visitor) HogQLParser::IdentifierContext* HogQLParser::identifier() { IdentifierContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 160, HogQLParser::RuleIdentifier); + enterRule(_localctx, 156, HogQLParser::RuleIdentifier); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -11533,12 +11385,12 @@ HogQLParser::IdentifierContext* HogQLParser::identifier() { exitRule(); }); try { - setState(1264); + setState(1256); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::IDENTIFIER: { enterOuterAlt(_localctx, 1); - setState(1261); + setState(1253); match(HogQLParser::IDENTIFIER); break; } @@ -11552,7 +11404,7 @@ HogQLParser::IdentifierContext* HogQLParser::identifier() { case HogQLParser::WEEK: case HogQLParser::YEAR: { enterOuterAlt(_localctx, 2); - setState(1262); + setState(1254); interval(); break; } @@ -11643,7 +11495,7 @@ HogQLParser::IdentifierContext* HogQLParser::identifier() { case HogQLParser::WINDOW: case HogQLParser::WITH: { enterOuterAlt(_localctx, 3); - setState(1263); + setState(1255); keyword(); break; } @@ -11695,7 +11547,7 @@ std::any HogQLParser::EnumValueContext::accept(tree::ParseTreeVisitor *visitor) HogQLParser::EnumValueContext* HogQLParser::enumValue() { EnumValueContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 162, HogQLParser::RuleEnumValue); + enterRule(_localctx, 158, HogQLParser::RuleEnumValue); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -11706,11 +11558,11 @@ HogQLParser::EnumValueContext* HogQLParser::enumValue() { }); try { enterOuterAlt(_localctx, 1); - setState(1266); + setState(1258); string(); - setState(1267); + setState(1259); match(HogQLParser::EQ_SINGLE); - setState(1268); + setState(1260); numberLiteral(); } @@ -11756,7 +11608,7 @@ std::any HogQLParser::PlaceholderContext::accept(tree::ParseTreeVisitor *visitor HogQLParser::PlaceholderContext* HogQLParser::placeholder() { PlaceholderContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 164, HogQLParser::RulePlaceholder); + enterRule(_localctx, 160, HogQLParser::RulePlaceholder); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -11767,11 +11619,11 @@ HogQLParser::PlaceholderContext* HogQLParser::placeholder() { }); try { enterOuterAlt(_localctx, 1); - setState(1270); + setState(1262); match(HogQLParser::LBRACE); - 
setState(1271); + setState(1263); nestedIdentifier(); - setState(1272); + setState(1264); match(HogQLParser::RBRACE); } @@ -11813,7 +11665,7 @@ std::any HogQLParser::StringContext::accept(tree::ParseTreeVisitor *visitor) { HogQLParser::StringContext* HogQLParser::string() { StringContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 166, HogQLParser::RuleString); + enterRule(_localctx, 162, HogQLParser::RuleString); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -11823,19 +11675,19 @@ HogQLParser::StringContext* HogQLParser::string() { exitRule(); }); try { - setState(1276); + setState(1268); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::STRING_LITERAL: { enterOuterAlt(_localctx, 1); - setState(1274); + setState(1266); match(HogQLParser::STRING_LITERAL); break; } case HogQLParser::QUOTE_SINGLE_TEMPLATE: { enterOuterAlt(_localctx, 2); - setState(1275); + setState(1267); templateString(); break; } @@ -11891,7 +11743,7 @@ std::any HogQLParser::TemplateStringContext::accept(tree::ParseTreeVisitor *visi HogQLParser::TemplateStringContext* HogQLParser::templateString() { TemplateStringContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 168, HogQLParser::RuleTemplateString); + enterRule(_localctx, 164, HogQLParser::RuleTemplateString); size_t _la = 0; #if __cplusplus > 201703L @@ -11903,21 +11755,21 @@ HogQLParser::TemplateStringContext* HogQLParser::templateString() { }); try { enterOuterAlt(_localctx, 1); - setState(1278); + setState(1270); match(HogQLParser::QUOTE_SINGLE_TEMPLATE); - setState(1282); + setState(1274); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::STRING_TEXT || _la == HogQLParser::STRING_ESCAPE_TRIGGER) { - setState(1279); + setState(1271); stringContents(); - setState(1284); + setState(1276); _errHandler->sync(this); _la = _input->LA(1); } - setState(1285); + setState(1277); match(HogQLParser::QUOTE_SINGLE); } @@ -11967,7 +11819,7 @@ std::any HogQLParser::StringContentsContext::accept(tree::ParseTreeVisitor *visi HogQLParser::StringContentsContext* HogQLParser::stringContents() { StringContentsContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 170, HogQLParser::RuleStringContents); + enterRule(_localctx, 166, HogQLParser::RuleStringContents); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -11977,23 +11829,23 @@ HogQLParser::StringContentsContext* HogQLParser::stringContents() { exitRule(); }); try { - setState(1292); + setState(1284); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::STRING_ESCAPE_TRIGGER: { enterOuterAlt(_localctx, 1); - setState(1287); + setState(1279); match(HogQLParser::STRING_ESCAPE_TRIGGER); - setState(1288); + setState(1280); columnExpr(0); - setState(1289); + setState(1281); match(HogQLParser::RBRACE); break; } case HogQLParser::STRING_TEXT: { enterOuterAlt(_localctx, 2); - setState(1291); + setState(1283); match(HogQLParser::STRING_TEXT); break; } @@ -12049,7 +11901,7 @@ std::any HogQLParser::FullTemplateStringContext::accept(tree::ParseTreeVisitor * HogQLParser::FullTemplateStringContext* HogQLParser::fullTemplateString() { FullTemplateStringContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 172, HogQLParser::RuleFullTemplateString); + enterRule(_localctx, 168, HogQLParser::RuleFullTemplateString); size_t _la = 0; #if __cplusplus > 201703L @@ -12061,21 +11913,21 @@ HogQLParser::FullTemplateStringContext* 
HogQLParser::fullTemplateString() { }); try { enterOuterAlt(_localctx, 1); - setState(1294); + setState(1286); match(HogQLParser::QUOTE_SINGLE_TEMPLATE_FULL); - setState(1298); + setState(1290); _errHandler->sync(this); _la = _input->LA(1); while (_la == HogQLParser::FULL_STRING_TEXT || _la == HogQLParser::FULL_STRING_ESCAPE_TRIGGER) { - setState(1295); + setState(1287); stringContentsFull(); - setState(1300); + setState(1292); _errHandler->sync(this); _la = _input->LA(1); } - setState(1301); + setState(1293); match(HogQLParser::EOF); } @@ -12125,7 +11977,7 @@ std::any HogQLParser::StringContentsFullContext::accept(tree::ParseTreeVisitor * HogQLParser::StringContentsFullContext* HogQLParser::stringContentsFull() { StringContentsFullContext *_localctx = _tracker.createInstance(_ctx, getState()); - enterRule(_localctx, 174, HogQLParser::RuleStringContentsFull); + enterRule(_localctx, 170, HogQLParser::RuleStringContentsFull); #if __cplusplus > 201703L auto onExit = finally([=, this] { @@ -12135,23 +11987,23 @@ HogQLParser::StringContentsFullContext* HogQLParser::stringContentsFull() { exitRule(); }); try { - setState(1308); + setState(1300); _errHandler->sync(this); switch (_input->LA(1)) { case HogQLParser::FULL_STRING_ESCAPE_TRIGGER: { enterOuterAlt(_localctx, 1); - setState(1303); + setState(1295); match(HogQLParser::FULL_STRING_ESCAPE_TRIGGER); - setState(1304); + setState(1296); columnExpr(0); - setState(1305); + setState(1297); match(HogQLParser::RBRACE); break; } case HogQLParser::FULL_STRING_TEXT: { enterOuterAlt(_localctx, 2); - setState(1307); + setState(1299); match(HogQLParser::FULL_STRING_TEXT); break; } @@ -12174,7 +12026,7 @@ bool HogQLParser::sempred(RuleContext *context, size_t ruleIndex, size_t predica switch (ruleIndex) { case 39: return joinExprSempred(antlrcpp::downCast(context), predicateIndex); case 58: return columnExprSempred(antlrcpp::downCast(context), predicateIndex); - case 68: return tableExprSempred(antlrcpp::downCast(context), predicateIndex); + case 66: return tableExprSempred(antlrcpp::downCast(context), predicateIndex); default: break; @@ -12195,22 +12047,23 @@ bool HogQLParser::joinExprSempred(JoinExprContext *_localctx, size_t predicateIn bool HogQLParser::columnExprSempred(ColumnExprContext *_localctx, size_t predicateIndex) { switch (predicateIndex) { - case 2: return precpred(_ctx, 18); - case 3: return precpred(_ctx, 17); - case 4: return precpred(_ctx, 16); - case 5: return precpred(_ctx, 14); - case 6: return precpred(_ctx, 12); - case 7: return precpred(_ctx, 11); - case 8: return precpred(_ctx, 10); - case 9: return precpred(_ctx, 9); - case 10: return precpred(_ctx, 25); - case 11: return precpred(_ctx, 24); - case 12: return precpred(_ctx, 23); - case 13: return precpred(_ctx, 22); - case 14: return precpred(_ctx, 21); - case 15: return precpred(_ctx, 20); - case 16: return precpred(_ctx, 15); - case 17: return precpred(_ctx, 8); + case 2: return precpred(_ctx, 19); + case 3: return precpred(_ctx, 18); + case 4: return precpred(_ctx, 17); + case 5: return precpred(_ctx, 15); + case 6: return precpred(_ctx, 13); + case 7: return precpred(_ctx, 12); + case 8: return precpred(_ctx, 11); + case 9: return precpred(_ctx, 10); + case 10: return precpred(_ctx, 30); + case 11: return precpred(_ctx, 26); + case 12: return precpred(_ctx, 25); + case 13: return precpred(_ctx, 24); + case 14: return precpred(_ctx, 23); + case 15: return precpred(_ctx, 22); + case 16: return precpred(_ctx, 21); + case 17: return precpred(_ctx, 16); + case 18: return 
precpred(_ctx, 9); default: break; @@ -12220,7 +12073,7 @@ bool HogQLParser::columnExprSempred(ColumnExprContext *_localctx, size_t predica bool HogQLParser::tableExprSempred(TableExprContext *_localctx, size_t predicateIndex) { switch (predicateIndex) { - case 18: return precpred(_ctx, 3); + case 19: return precpred(_ctx, 3); default: break; diff --git a/hogql_parser/HogQLParser.h b/hogql_parser/HogQLParser.h index acabb96a0baa5..a96c1ee007954 100644 --- a/hogql_parser/HogQLParser.h +++ b/hogql_parser/HogQLParser.h @@ -1,5 +1,5 @@ -// Generated from HogQLParser.g4 by ANTLR 4.13.1 +// Generated from HogQLParser.g4 by ANTLR 4.13.2 #pragma once @@ -61,16 +61,15 @@ class HogQLParser : public antlr4::Parser { RuleSettingExprList = 47, RuleSettingExpr = 48, RuleWindowExpr = 49, RuleWinPartitionByClause = 50, RuleWinOrderByClause = 51, RuleWinFrameClause = 52, RuleWinFrameExtend = 53, RuleWinFrameBound = 54, RuleExpr = 55, RuleColumnTypeExpr = 56, - RuleColumnExprList = 57, RuleColumnExpr = 58, RuleColumnArgList = 59, - RuleColumnArgExpr = 60, RuleColumnLambdaExpr = 61, RuleHogqlxTagElement = 62, - RuleHogqlxTagAttribute = 63, RuleWithExprList = 64, RuleWithExpr = 65, - RuleColumnIdentifier = 66, RuleNestedIdentifier = 67, RuleTableExpr = 68, - RuleTableFunctionExpr = 69, RuleTableIdentifier = 70, RuleTableArgList = 71, - RuleDatabaseIdentifier = 72, RuleFloatingLiteral = 73, RuleNumberLiteral = 74, - RuleLiteral = 75, RuleInterval = 76, RuleKeyword = 77, RuleKeywordForAlias = 78, - RuleAlias = 79, RuleIdentifier = 80, RuleEnumValue = 81, RulePlaceholder = 82, - RuleString = 83, RuleTemplateString = 84, RuleStringContents = 85, RuleFullTemplateString = 86, - RuleStringContentsFull = 87 + RuleColumnExprList = 57, RuleColumnExpr = 58, RuleColumnLambdaExpr = 59, + RuleHogqlxTagElement = 60, RuleHogqlxTagAttribute = 61, RuleWithExprList = 62, + RuleWithExpr = 63, RuleColumnIdentifier = 64, RuleNestedIdentifier = 65, + RuleTableExpr = 66, RuleTableFunctionExpr = 67, RuleTableIdentifier = 68, + RuleTableArgList = 69, RuleDatabaseIdentifier = 70, RuleFloatingLiteral = 71, + RuleNumberLiteral = 72, RuleLiteral = 73, RuleInterval = 74, RuleKeyword = 75, + RuleKeywordForAlias = 76, RuleAlias = 77, RuleIdentifier = 78, RuleEnumValue = 79, + RulePlaceholder = 80, RuleString = 81, RuleTemplateString = 82, RuleStringContents = 83, + RuleFullTemplateString = 84, RuleStringContentsFull = 85 }; explicit HogQLParser(antlr4::TokenStream *input); @@ -149,8 +148,6 @@ class HogQLParser : public antlr4::Parser { class ColumnTypeExprContext; class ColumnExprListContext; class ColumnExprContext; - class ColumnArgListContext; - class ColumnArgExprContext; class ColumnLambdaExprContext; class HogqlxTagElementContext; class HogqlxTagAttributeContext; @@ -1567,6 +1564,8 @@ class HogQLParser : public antlr4::Parser { public: ColumnExprWinFunctionTargetContext(ColumnExprContext *ctx); + HogQLParser::ColumnExprListContext *columnExprs = nullptr; + HogQLParser::ColumnExprListContext *columnArgList = nullptr; std::vector identifier(); IdentifierContext* identifier(size_t i); antlr4::tree::TerminalNode *OVER(); @@ -1574,9 +1573,9 @@ class HogQLParser : public antlr4::Parser { antlr4::tree::TerminalNode* LPAREN(size_t i); std::vector RPAREN(); antlr4::tree::TerminalNode* RPAREN(size_t i); - ColumnExprListContext *columnExprList(); + std::vector columnExprList(); + ColumnExprListContext* columnExprList(size_t i); antlr4::tree::TerminalNode *DISTINCT(); - ColumnArgListContext *columnArgList(); virtual std::any 
accept(antlr4::tree::ParseTreeVisitor *visitor) override; }; @@ -1638,6 +1637,18 @@ class HogQLParser : public antlr4::Parser { virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override; }; + class ColumnExprCallContext : public ColumnExprContext { + public: + ColumnExprCallContext(ColumnExprContext *ctx); + + ColumnExprContext *columnExpr(); + antlr4::tree::TerminalNode *LPAREN(); + antlr4::tree::TerminalNode *RPAREN(); + ColumnExprListContext *columnExprList(); + + virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override; + }; + class ColumnExprArrayAccessContext : public ColumnExprContext { public: ColumnExprArrayAccessContext(ColumnExprContext *ctx); @@ -1786,6 +1797,8 @@ class HogQLParser : public antlr4::Parser { public: ColumnExprWinFunctionContext(ColumnExprContext *ctx); + HogQLParser::ColumnExprListContext *columnExprs = nullptr; + HogQLParser::ColumnExprListContext *columnArgList = nullptr; IdentifierContext *identifier(); antlr4::tree::TerminalNode *OVER(); std::vector LPAREN(); @@ -1793,9 +1806,18 @@ class HogQLParser : public antlr4::Parser { WindowExprContext *windowExpr(); std::vector RPAREN(); antlr4::tree::TerminalNode* RPAREN(size_t i); - ColumnExprListContext *columnExprList(); + std::vector columnExprList(); + ColumnExprListContext* columnExprList(size_t i); antlr4::tree::TerminalNode *DISTINCT(); - ColumnArgListContext *columnArgList(); + + virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override; + }; + + class ColumnExprLambdaContext : public ColumnExprContext { + public: + ColumnExprLambdaContext(ColumnExprContext *ctx); + + ColumnLambdaExprContext *columnLambdaExpr(); virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override; }; @@ -1813,14 +1835,16 @@ class HogQLParser : public antlr4::Parser { public: ColumnExprFunctionContext(ColumnExprContext *ctx); + HogQLParser::ColumnExprListContext *columnExprs = nullptr; + HogQLParser::ColumnExprListContext *columnArgList = nullptr; IdentifierContext *identifier(); std::vector LPAREN(); antlr4::tree::TerminalNode* LPAREN(size_t i); std::vector RPAREN(); antlr4::tree::TerminalNode* RPAREN(size_t i); antlr4::tree::TerminalNode *DISTINCT(); - ColumnArgListContext *columnArgList(); - ColumnExprListContext *columnExprList(); + std::vector columnExprList(); + ColumnExprListContext* columnExprList(size_t i); virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override; }; @@ -1838,46 +1862,17 @@ class HogQLParser : public antlr4::Parser { ColumnExprContext* columnExpr(); ColumnExprContext* columnExpr(int precedence); - class ColumnArgListContext : public antlr4::ParserRuleContext { - public: - ColumnArgListContext(antlr4::ParserRuleContext *parent, size_t invokingState); - virtual size_t getRuleIndex() const override; - std::vector columnArgExpr(); - ColumnArgExprContext* columnArgExpr(size_t i); - std::vector COMMA(); - antlr4::tree::TerminalNode* COMMA(size_t i); - - - virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override; - - }; - - ColumnArgListContext* columnArgList(); - - class ColumnArgExprContext : public antlr4::ParserRuleContext { - public: - ColumnArgExprContext(antlr4::ParserRuleContext *parent, size_t invokingState); - virtual size_t getRuleIndex() const override; - ColumnLambdaExprContext *columnLambdaExpr(); - ColumnExprContext *columnExpr(); - - - virtual std::any accept(antlr4::tree::ParseTreeVisitor *visitor) override; - - }; - - ColumnArgExprContext* columnArgExpr(); - class ColumnLambdaExprContext : public 
antlr4::ParserRuleContext { public: ColumnLambdaExprContext(antlr4::ParserRuleContext *parent, size_t invokingState); virtual size_t getRuleIndex() const override; antlr4::tree::TerminalNode *ARROW(); - ColumnExprContext *columnExpr(); antlr4::tree::TerminalNode *LPAREN(); std::vector identifier(); IdentifierContext* identifier(size_t i); antlr4::tree::TerminalNode *RPAREN(); + ColumnExprContext *columnExpr(); + BlockContext *block(); std::vector COMMA(); antlr4::tree::TerminalNode* COMMA(size_t i); diff --git a/hogql_parser/HogQLParser.interp b/hogql_parser/HogQLParser.interp index 183059ec0fd79..b965cfbb577c7 100644 --- a/hogql_parser/HogQLParser.interp +++ b/hogql_parser/HogQLParser.interp @@ -382,8 +382,6 @@ expr columnTypeExpr columnExprList columnExpr -columnArgList -columnArgExpr columnLambdaExpr hogqlxTagElement hogqlxTagAttribute @@ -414,4 +412,4 @@ stringContentsFull atn: -[4, 1, 159, 1311, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 1, 0, 5, 0, 178, 8, 0, 10, 0, 12, 0, 181, 9, 0, 1, 0, 1, 0, 1, 1, 1, 1, 3, 1, 187, 8, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 196, 8, 3, 1, 4, 1, 4, 1, 4, 5, 4, 201, 8, 4, 10, 4, 12, 4, 204, 9, 4, 1, 4, 3, 4, 207, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 221, 8, 5, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 6, 3, 6, 228, 8, 6, 1, 7, 1, 7, 3, 7, 232, 8, 7, 1, 7, 3, 7, 235, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 242, 8, 8, 1, 8, 1, 8, 3, 8, 246, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 253, 8, 9, 10, 9, 12, 9, 256, 9, 9, 1, 9, 1, 9, 3, 9, 260, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 269, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 277, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 284, 8, 12, 1, 12, 1, 12, 3, 12, 288, 8, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 294, 8, 12, 1, 12, 1, 12, 1, 12, 3, 12, 299, 8, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 307, 8, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 314, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 320, 8, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 3, 16, 332, 8, 16, 1, 17, 1, 17, 1, 18, 1, 18, 5, 18, 338, 8, 18, 10, 18, 12, 18, 341, 9, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 5, 20, 352, 8, 20, 10, 20, 12, 20, 355, 9, 20, 1, 20, 3, 20, 358, 8, 20, 1, 21, 1, 21, 1, 21, 3, 21, 363, 8, 21, 1, 21, 1, 21, 1, 22, 
1, 22, 1, 22, 1, 22, 5, 22, 371, 8, 22, 10, 22, 12, 22, 374, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 382, 8, 23, 1, 24, 3, 24, 385, 8, 24, 1, 24, 1, 24, 3, 24, 389, 8, 24, 1, 24, 3, 24, 392, 8, 24, 1, 24, 1, 24, 3, 24, 396, 8, 24, 1, 24, 3, 24, 399, 8, 24, 1, 24, 3, 24, 402, 8, 24, 1, 24, 3, 24, 405, 8, 24, 1, 24, 3, 24, 408, 8, 24, 1, 24, 1, 24, 3, 24, 412, 8, 24, 1, 24, 1, 24, 3, 24, 416, 8, 24, 1, 24, 3, 24, 419, 8, 24, 1, 24, 3, 24, 422, 8, 24, 1, 24, 3, 24, 425, 8, 24, 1, 24, 1, 24, 3, 24, 429, 8, 24, 1, 24, 3, 24, 432, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 441, 8, 26, 1, 27, 1, 27, 1, 27, 1, 28, 3, 28, 447, 8, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 466, 8, 29, 10, 29, 12, 29, 469, 9, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 485, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 502, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 508, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 514, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 525, 8, 36, 3, 36, 527, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 538, 8, 39, 1, 39, 3, 39, 541, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 547, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 555, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 561, 8, 39, 10, 39, 12, 39, 564, 9, 39, 1, 40, 3, 40, 567, 8, 40, 1, 40, 1, 40, 1, 40, 3, 40, 572, 8, 40, 1, 40, 3, 40, 575, 8, 40, 1, 40, 3, 40, 578, 8, 40, 1, 40, 1, 40, 3, 40, 582, 8, 40, 1, 40, 1, 40, 3, 40, 586, 8, 40, 1, 40, 3, 40, 589, 8, 40, 3, 40, 591, 8, 40, 1, 40, 3, 40, 594, 8, 40, 1, 40, 1, 40, 3, 40, 598, 8, 40, 1, 40, 1, 40, 3, 40, 602, 8, 40, 1, 40, 3, 40, 605, 8, 40, 3, 40, 607, 8, 40, 3, 40, 609, 8, 40, 1, 41, 1, 41, 1, 41, 3, 41, 614, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 625, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 3, 43, 631, 8, 43, 1, 44, 1, 44, 1, 44, 5, 44, 636, 8, 44, 10, 44, 12, 44, 639, 9, 44, 1, 45, 1, 45, 3, 45, 643, 8, 45, 1, 45, 1, 45, 3, 45, 647, 8, 45, 1, 45, 1, 45, 3, 45, 651, 8, 45, 1, 46, 1, 46, 1, 46, 1, 46, 3, 46, 657, 8, 46, 3, 46, 659, 8, 46, 1, 47, 1, 47, 1, 47, 5, 47, 664, 8, 47, 10, 47, 12, 47, 667, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 3, 49, 674, 8, 49, 1, 49, 3, 49, 677, 8, 49, 1, 49, 3, 49, 680, 8, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 699, 8, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 3, 54, 713, 8, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 727, 8, 56, 10, 56, 12, 56, 730, 9, 56, 1, 56, 3, 56, 733, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 742, 8, 56, 10, 56, 12, 56, 745, 9, 56, 1, 56, 3, 56, 748, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 757, 8, 56, 10, 56, 12, 56, 760, 9, 56, 1, 56, 3, 56, 763, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 770, 8, 56, 1, 56, 1, 56, 3, 56, 774, 8, 56, 1, 57, 1, 57, 1, 57, 5, 57, 779, 8, 57, 10, 57, 12, 57, 782, 9, 57, 1, 57, 3, 57, 785, 8, 57, 1, 58, 1, 58, 1, 58, 3, 58, 790, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 4, 58, 797, 8, 58, 11, 58, 12, 58, 798, 1, 58, 1, 58, 3, 58, 803, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 
58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 827, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 844, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 850, 8, 58, 1, 58, 3, 58, 853, 8, 58, 1, 58, 3, 58, 856, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 866, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 872, 8, 58, 1, 58, 3, 58, 875, 8, 58, 1, 58, 3, 58, 878, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 886, 8, 58, 1, 58, 3, 58, 889, 8, 58, 1, 58, 1, 58, 3, 58, 893, 8, 58, 1, 58, 3, 58, 896, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 910, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 927, 8, 58, 1, 58, 1, 58, 1, 58, 3, 58, 932, 8, 58, 1, 58, 1, 58, 3, 58, 936, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 942, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 949, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 961, 8, 58, 1, 58, 1, 58, 3, 58, 965, 8, 58, 1, 58, 3, 58, 968, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 977, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 991, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1030, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1038, 8, 58, 5, 58, 1040, 8, 58, 10, 58, 12, 58, 1043, 9, 58, 1, 59, 1, 59, 1, 59, 5, 59, 1048, 8, 59, 10, 59, 12, 59, 1051, 9, 59, 1, 59, 3, 59, 1054, 8, 59, 1, 60, 1, 60, 3, 60, 1058, 8, 60, 1, 61, 1, 61, 1, 61, 1, 61, 5, 61, 1064, 8, 61, 10, 61, 12, 61, 1067, 9, 61, 1, 61, 3, 61, 1070, 8, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 5, 61, 1077, 8, 61, 10, 61, 12, 61, 1080, 9, 61, 1, 61, 3, 61, 1083, 8, 61, 3, 61, 1085, 8, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 5, 62, 1093, 8, 62, 10, 62, 12, 62, 1096, 9, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 5, 62, 1104, 8, 62, 10, 62, 12, 62, 1107, 9, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 3, 62, 1115, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 3, 62, 1122, 8, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 3, 63, 1135, 8, 63, 1, 64, 1, 64, 1, 64, 5, 64, 1140, 8, 64, 10, 64, 12, 64, 1143, 9, 64, 1, 64, 3, 64, 1146, 8, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 1158, 8, 65, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 1164, 8, 66, 1, 66, 3, 66, 1167, 8, 66, 1, 67, 1, 67, 1, 67, 5, 67, 1172, 8, 67, 10, 67, 12, 67, 1175, 9, 67, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 3, 68, 1186, 8, 68, 1, 68, 1, 68, 1, 68, 1, 68, 3, 68, 1192, 8, 68, 5, 68, 1194, 8, 68, 10, 68, 12, 68, 1197, 9, 68, 1, 69, 1, 69, 1, 69, 3, 69, 1202, 8, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 3, 70, 1209, 8, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 5, 71, 1216, 8, 71, 10, 71, 12, 71, 1219, 9, 71, 1, 71, 3, 71, 1222, 8, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 3, 73, 1232, 8, 73, 3, 73, 1234, 8, 73, 1, 74, 3, 74, 1237, 8, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 3, 74, 1245, 8, 74, 1, 75, 1, 75, 1, 75, 3, 75, 1250, 8, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 3, 79, 1260, 8, 79, 1, 
80, 1, 80, 1, 80, 3, 80, 1265, 8, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 3, 83, 1277, 8, 83, 1, 84, 1, 84, 5, 84, 1281, 8, 84, 10, 84, 12, 84, 1284, 9, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 3, 85, 1293, 8, 85, 1, 86, 1, 86, 5, 86, 1297, 8, 86, 10, 86, 12, 86, 1300, 9, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 3, 87, 1309, 8, 87, 1, 87, 0, 3, 78, 116, 136, 88, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 174, 0, 16, 2, 0, 18, 18, 74, 74, 2, 0, 44, 44, 51, 51, 3, 0, 1, 1, 4, 4, 8, 8, 4, 0, 1, 1, 3, 4, 8, 8, 80, 80, 2, 0, 51, 51, 73, 73, 2, 0, 1, 1, 4, 4, 2, 0, 7, 7, 22, 23, 2, 0, 30, 30, 49, 49, 2, 0, 71, 71, 76, 76, 3, 0, 10, 10, 50, 50, 90, 90, 2, 0, 41, 41, 53, 53, 1, 0, 107, 108, 2, 0, 118, 118, 139, 139, 7, 0, 21, 21, 38, 38, 55, 56, 70, 70, 78, 78, 97, 97, 103, 103, 16, 0, 1, 13, 15, 20, 22, 28, 30, 30, 32, 37, 39, 42, 44, 51, 53, 54, 58, 58, 60, 69, 71, 77, 79, 83, 85, 92, 94, 96, 98, 99, 101, 102, 4, 0, 20, 20, 30, 30, 39, 39, 48, 48, 1479, 0, 179, 1, 0, 0, 0, 2, 186, 1, 0, 0, 0, 4, 188, 1, 0, 0, 0, 6, 190, 1, 0, 0, 0, 8, 197, 1, 0, 0, 0, 10, 220, 1, 0, 0, 0, 12, 222, 1, 0, 0, 0, 14, 229, 1, 0, 0, 0, 16, 236, 1, 0, 0, 0, 18, 249, 1, 0, 0, 0, 20, 261, 1, 0, 0, 0, 22, 270, 1, 0, 0, 0, 24, 278, 1, 0, 0, 0, 26, 300, 1, 0, 0, 0, 28, 315, 1, 0, 0, 0, 30, 324, 1, 0, 0, 0, 32, 329, 1, 0, 0, 0, 34, 333, 1, 0, 0, 0, 36, 335, 1, 0, 0, 0, 38, 344, 1, 0, 0, 0, 40, 348, 1, 0, 0, 0, 42, 362, 1, 0, 0, 0, 44, 366, 1, 0, 0, 0, 46, 381, 1, 0, 0, 0, 48, 384, 1, 0, 0, 0, 50, 433, 1, 0, 0, 0, 52, 436, 1, 0, 0, 0, 54, 442, 1, 0, 0, 0, 56, 446, 1, 0, 0, 0, 58, 452, 1, 0, 0, 0, 60, 470, 1, 0, 0, 0, 62, 473, 1, 0, 0, 0, 64, 476, 1, 0, 0, 0, 66, 486, 1, 0, 0, 0, 68, 489, 1, 0, 0, 0, 70, 493, 1, 0, 0, 0, 72, 526, 1, 0, 0, 0, 74, 528, 1, 0, 0, 0, 76, 531, 1, 0, 0, 0, 78, 546, 1, 0, 0, 0, 80, 608, 1, 0, 0, 0, 82, 613, 1, 0, 0, 0, 84, 624, 1, 0, 0, 0, 86, 626, 1, 0, 0, 0, 88, 632, 1, 0, 0, 0, 90, 640, 1, 0, 0, 0, 92, 658, 1, 0, 0, 0, 94, 660, 1, 0, 0, 0, 96, 668, 1, 0, 0, 0, 98, 673, 1, 0, 0, 0, 100, 681, 1, 0, 0, 0, 102, 685, 1, 0, 0, 0, 104, 689, 1, 0, 0, 0, 106, 698, 1, 0, 0, 0, 108, 712, 1, 0, 0, 0, 110, 714, 1, 0, 0, 0, 112, 773, 1, 0, 0, 0, 114, 775, 1, 0, 0, 0, 116, 935, 1, 0, 0, 0, 118, 1044, 1, 0, 0, 0, 120, 1057, 1, 0, 0, 0, 122, 1084, 1, 0, 0, 0, 124, 1121, 1, 0, 0, 0, 126, 1134, 1, 0, 0, 0, 128, 1136, 1, 0, 0, 0, 130, 1157, 1, 0, 0, 0, 132, 1166, 1, 0, 0, 0, 134, 1168, 1, 0, 0, 0, 136, 1185, 1, 0, 0, 0, 138, 1198, 1, 0, 0, 0, 140, 1208, 1, 0, 0, 0, 142, 1212, 1, 0, 0, 0, 144, 1223, 1, 0, 0, 0, 146, 1233, 1, 0, 0, 0, 148, 1236, 1, 0, 0, 0, 150, 1249, 1, 0, 0, 0, 152, 1251, 1, 0, 0, 0, 154, 1253, 1, 0, 0, 0, 156, 1255, 1, 0, 0, 0, 158, 1259, 1, 0, 0, 0, 160, 1264, 1, 0, 0, 0, 162, 1266, 1, 0, 0, 0, 164, 1270, 1, 0, 0, 0, 166, 1276, 1, 0, 0, 0, 168, 1278, 1, 0, 0, 0, 170, 1292, 1, 0, 0, 0, 172, 1294, 1, 0, 0, 0, 174, 1308, 1, 0, 0, 0, 176, 178, 3, 2, 1, 0, 177, 176, 1, 0, 0, 0, 178, 181, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 182, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 182, 183, 5, 0, 0, 1, 183, 1, 1, 0, 0, 0, 184, 187, 3, 6, 3, 0, 185, 187, 3, 10, 5, 0, 186, 184, 1, 0, 0, 0, 186, 185, 1, 0, 0, 0, 
187, 3, 1, 0, 0, 0, 188, 189, 3, 116, 58, 0, 189, 5, 1, 0, 0, 0, 190, 191, 5, 52, 0, 0, 191, 195, 3, 160, 80, 0, 192, 193, 5, 115, 0, 0, 193, 194, 5, 122, 0, 0, 194, 196, 3, 4, 2, 0, 195, 192, 1, 0, 0, 0, 195, 196, 1, 0, 0, 0, 196, 7, 1, 0, 0, 0, 197, 202, 3, 160, 80, 0, 198, 199, 5, 116, 0, 0, 199, 201, 3, 160, 80, 0, 200, 198, 1, 0, 0, 0, 201, 204, 1, 0, 0, 0, 202, 200, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 206, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 207, 5, 116, 0, 0, 206, 205, 1, 0, 0, 0, 206, 207, 1, 0, 0, 0, 207, 9, 1, 0, 0, 0, 208, 221, 3, 12, 6, 0, 209, 221, 3, 14, 7, 0, 210, 221, 3, 18, 9, 0, 211, 221, 3, 20, 10, 0, 212, 221, 3, 22, 11, 0, 213, 221, 3, 26, 13, 0, 214, 221, 3, 24, 12, 0, 215, 221, 3, 28, 14, 0, 216, 221, 3, 30, 15, 0, 217, 221, 3, 36, 18, 0, 218, 221, 3, 32, 16, 0, 219, 221, 3, 34, 17, 0, 220, 208, 1, 0, 0, 0, 220, 209, 1, 0, 0, 0, 220, 210, 1, 0, 0, 0, 220, 211, 1, 0, 0, 0, 220, 212, 1, 0, 0, 0, 220, 213, 1, 0, 0, 0, 220, 214, 1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 220, 216, 1, 0, 0, 0, 220, 217, 1, 0, 0, 0, 220, 218, 1, 0, 0, 0, 220, 219, 1, 0, 0, 0, 221, 11, 1, 0, 0, 0, 222, 224, 5, 72, 0, 0, 223, 225, 3, 4, 2, 0, 224, 223, 1, 0, 0, 0, 224, 225, 1, 0, 0, 0, 225, 227, 1, 0, 0, 0, 226, 228, 5, 150, 0, 0, 227, 226, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 13, 1, 0, 0, 0, 229, 231, 5, 84, 0, 0, 230, 232, 3, 4, 2, 0, 231, 230, 1, 0, 0, 0, 231, 232, 1, 0, 0, 0, 232, 234, 1, 0, 0, 0, 233, 235, 5, 150, 0, 0, 234, 233, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 15, 1, 0, 0, 0, 236, 245, 5, 14, 0, 0, 237, 238, 5, 130, 0, 0, 238, 241, 3, 160, 80, 0, 239, 240, 5, 115, 0, 0, 240, 242, 3, 160, 80, 0, 241, 239, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 5, 149, 0, 0, 244, 246, 1, 0, 0, 0, 245, 237, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 3, 36, 18, 0, 248, 17, 1, 0, 0, 0, 249, 250, 5, 93, 0, 0, 250, 254, 3, 36, 18, 0, 251, 253, 3, 16, 8, 0, 252, 251, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 259, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 258, 5, 29, 0, 0, 258, 260, 3, 36, 18, 0, 259, 257, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260, 19, 1, 0, 0, 0, 261, 262, 5, 40, 0, 0, 262, 263, 5, 130, 0, 0, 263, 264, 3, 4, 2, 0, 264, 265, 5, 149, 0, 0, 265, 268, 3, 10, 5, 0, 266, 267, 5, 25, 0, 0, 267, 269, 3, 10, 5, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 21, 1, 0, 0, 0, 270, 271, 5, 100, 0, 0, 271, 272, 5, 130, 0, 0, 272, 273, 3, 4, 2, 0, 273, 274, 5, 149, 0, 0, 274, 276, 3, 10, 5, 0, 275, 277, 5, 150, 0, 0, 276, 275, 1, 0, 0, 0, 276, 277, 1, 0, 0, 0, 277, 23, 1, 0, 0, 0, 278, 279, 5, 33, 0, 0, 279, 283, 5, 130, 0, 0, 280, 284, 3, 6, 3, 0, 281, 284, 3, 30, 15, 0, 282, 284, 3, 4, 2, 0, 283, 280, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 283, 282, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 287, 5, 150, 0, 0, 286, 288, 3, 4, 2, 0, 287, 286, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 293, 5, 150, 0, 0, 290, 294, 3, 6, 3, 0, 291, 294, 3, 30, 15, 0, 292, 294, 3, 4, 2, 0, 293, 290, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 292, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 5, 149, 0, 0, 296, 298, 3, 10, 5, 0, 297, 299, 5, 150, 0, 0, 298, 297, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 25, 1, 0, 0, 0, 300, 301, 5, 33, 0, 0, 301, 302, 5, 130, 0, 0, 302, 303, 5, 52, 0, 0, 303, 306, 3, 160, 80, 0, 304, 305, 5, 116, 0, 0, 305, 307, 3, 160, 80, 0, 306, 304, 1, 0, 0, 0, 306, 307, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 309, 5, 42, 0, 0, 309, 310, 3, 4, 2, 0, 310, 311, 5, 149, 0, 
0, 311, 313, 3, 10, 5, 0, 312, 314, 5, 150, 0, 0, 313, 312, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314, 27, 1, 0, 0, 0, 315, 316, 5, 31, 0, 0, 316, 317, 3, 160, 80, 0, 317, 319, 5, 130, 0, 0, 318, 320, 3, 8, 4, 0, 319, 318, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 322, 5, 149, 0, 0, 322, 323, 3, 36, 18, 0, 323, 29, 1, 0, 0, 0, 324, 325, 3, 4, 2, 0, 325, 326, 5, 115, 0, 0, 326, 327, 5, 122, 0, 0, 327, 328, 3, 4, 2, 0, 328, 31, 1, 0, 0, 0, 329, 331, 3, 4, 2, 0, 330, 332, 5, 150, 0, 0, 331, 330, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 33, 1, 0, 0, 0, 333, 334, 5, 150, 0, 0, 334, 35, 1, 0, 0, 0, 335, 339, 5, 128, 0, 0, 336, 338, 3, 2, 1, 0, 337, 336, 1, 0, 0, 0, 338, 341, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 339, 340, 1, 0, 0, 0, 340, 342, 1, 0, 0, 0, 341, 339, 1, 0, 0, 0, 342, 343, 5, 147, 0, 0, 343, 37, 1, 0, 0, 0, 344, 345, 3, 4, 2, 0, 345, 346, 5, 115, 0, 0, 346, 347, 3, 4, 2, 0, 347, 39, 1, 0, 0, 0, 348, 353, 3, 38, 19, 0, 349, 350, 5, 116, 0, 0, 350, 352, 3, 38, 19, 0, 351, 349, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 356, 358, 5, 116, 0, 0, 357, 356, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 41, 1, 0, 0, 0, 359, 363, 3, 44, 22, 0, 360, 363, 3, 48, 24, 0, 361, 363, 3, 124, 62, 0, 362, 359, 1, 0, 0, 0, 362, 360, 1, 0, 0, 0, 362, 361, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 365, 5, 0, 0, 1, 365, 43, 1, 0, 0, 0, 366, 372, 3, 46, 23, 0, 367, 368, 5, 95, 0, 0, 368, 369, 5, 1, 0, 0, 369, 371, 3, 46, 23, 0, 370, 367, 1, 0, 0, 0, 371, 374, 1, 0, 0, 0, 372, 370, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 45, 1, 0, 0, 0, 374, 372, 1, 0, 0, 0, 375, 382, 3, 48, 24, 0, 376, 377, 5, 130, 0, 0, 377, 378, 3, 44, 22, 0, 378, 379, 5, 149, 0, 0, 379, 382, 1, 0, 0, 0, 380, 382, 3, 164, 82, 0, 381, 375, 1, 0, 0, 0, 381, 376, 1, 0, 0, 0, 381, 380, 1, 0, 0, 0, 382, 47, 1, 0, 0, 0, 383, 385, 3, 50, 25, 0, 384, 383, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 388, 5, 79, 0, 0, 387, 389, 5, 24, 0, 0, 388, 387, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 391, 1, 0, 0, 0, 390, 392, 3, 52, 26, 0, 391, 390, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 395, 3, 114, 57, 0, 394, 396, 3, 54, 27, 0, 395, 394, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 398, 1, 0, 0, 0, 397, 399, 3, 56, 28, 0, 398, 397, 1, 0, 0, 0, 398, 399, 1, 0, 0, 0, 399, 401, 1, 0, 0, 0, 400, 402, 3, 60, 30, 0, 401, 400, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 404, 1, 0, 0, 0, 403, 405, 3, 62, 31, 0, 404, 403, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 407, 1, 0, 0, 0, 406, 408, 3, 64, 32, 0, 407, 406, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 410, 5, 102, 0, 0, 410, 412, 7, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 415, 1, 0, 0, 0, 413, 414, 5, 102, 0, 0, 414, 416, 5, 89, 0, 0, 415, 413, 1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 418, 1, 0, 0, 0, 417, 419, 3, 66, 33, 0, 418, 417, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 421, 1, 0, 0, 0, 420, 422, 3, 58, 29, 0, 421, 420, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 424, 1, 0, 0, 0, 423, 425, 3, 68, 34, 0, 424, 423, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 428, 1, 0, 0, 0, 426, 429, 3, 72, 36, 0, 427, 429, 3, 74, 37, 0, 428, 426, 1, 0, 0, 0, 428, 427, 1, 0, 0, 0, 428, 429, 1, 0, 0, 0, 429, 431, 1, 0, 0, 0, 430, 432, 3, 76, 38, 0, 431, 430, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 49, 1, 0, 0, 0, 433, 434, 5, 102, 0, 0, 434, 435, 3, 128, 64, 0, 435, 51, 1, 0, 0, 0, 436, 437, 5, 88, 0, 0, 437, 440, 5, 108, 0, 0, 438, 439, 5, 102, 0, 0, 439, 441, 5, 85, 0, 0, 440, 438, 1, 0, 0, 0, 440, 
441, 1, 0, 0, 0, 441, 53, 1, 0, 0, 0, 442, 443, 5, 34, 0, 0, 443, 444, 3, 78, 39, 0, 444, 55, 1, 0, 0, 0, 445, 447, 7, 1, 0, 0, 446, 445, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 5, 5, 0, 0, 449, 450, 5, 47, 0, 0, 450, 451, 3, 114, 57, 0, 451, 57, 1, 0, 0, 0, 452, 453, 5, 101, 0, 0, 453, 454, 3, 160, 80, 0, 454, 455, 5, 6, 0, 0, 455, 456, 5, 130, 0, 0, 456, 457, 3, 98, 49, 0, 457, 467, 5, 149, 0, 0, 458, 459, 5, 116, 0, 0, 459, 460, 3, 160, 80, 0, 460, 461, 5, 6, 0, 0, 461, 462, 5, 130, 0, 0, 462, 463, 3, 98, 49, 0, 463, 464, 5, 149, 0, 0, 464, 466, 1, 0, 0, 0, 465, 458, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 467, 468, 1, 0, 0, 0, 468, 59, 1, 0, 0, 0, 469, 467, 1, 0, 0, 0, 470, 471, 5, 69, 0, 0, 471, 472, 3, 116, 58, 0, 472, 61, 1, 0, 0, 0, 473, 474, 5, 99, 0, 0, 474, 475, 3, 116, 58, 0, 475, 63, 1, 0, 0, 0, 476, 477, 5, 36, 0, 0, 477, 484, 5, 11, 0, 0, 478, 479, 7, 0, 0, 0, 479, 480, 5, 130, 0, 0, 480, 481, 3, 114, 57, 0, 481, 482, 5, 149, 0, 0, 482, 485, 1, 0, 0, 0, 483, 485, 3, 114, 57, 0, 484, 478, 1, 0, 0, 0, 484, 483, 1, 0, 0, 0, 485, 65, 1, 0, 0, 0, 486, 487, 5, 37, 0, 0, 487, 488, 3, 116, 58, 0, 488, 67, 1, 0, 0, 0, 489, 490, 5, 64, 0, 0, 490, 491, 5, 11, 0, 0, 491, 492, 3, 88, 44, 0, 492, 69, 1, 0, 0, 0, 493, 494, 5, 64, 0, 0, 494, 495, 5, 11, 0, 0, 495, 496, 3, 114, 57, 0, 496, 71, 1, 0, 0, 0, 497, 498, 5, 54, 0, 0, 498, 501, 3, 116, 58, 0, 499, 500, 5, 116, 0, 0, 500, 502, 3, 116, 58, 0, 501, 499, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 507, 1, 0, 0, 0, 503, 504, 5, 102, 0, 0, 504, 508, 5, 85, 0, 0, 505, 506, 5, 11, 0, 0, 506, 508, 3, 114, 57, 0, 507, 503, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 527, 1, 0, 0, 0, 509, 510, 5, 54, 0, 0, 510, 513, 3, 116, 58, 0, 511, 512, 5, 102, 0, 0, 512, 514, 5, 85, 0, 0, 513, 511, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 5, 61, 0, 0, 516, 517, 3, 116, 58, 0, 517, 527, 1, 0, 0, 0, 518, 519, 5, 54, 0, 0, 519, 520, 3, 116, 58, 0, 520, 521, 5, 61, 0, 0, 521, 524, 3, 116, 58, 0, 522, 523, 5, 11, 0, 0, 523, 525, 3, 114, 57, 0, 524, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 527, 1, 0, 0, 0, 526, 497, 1, 0, 0, 0, 526, 509, 1, 0, 0, 0, 526, 518, 1, 0, 0, 0, 527, 73, 1, 0, 0, 0, 528, 529, 5, 61, 0, 0, 529, 530, 3, 116, 58, 0, 530, 75, 1, 0, 0, 0, 531, 532, 5, 81, 0, 0, 532, 533, 3, 94, 47, 0, 533, 77, 1, 0, 0, 0, 534, 535, 6, 39, -1, 0, 535, 537, 3, 136, 68, 0, 536, 538, 5, 28, 0, 0, 537, 536, 1, 0, 0, 0, 537, 538, 1, 0, 0, 0, 538, 540, 1, 0, 0, 0, 539, 541, 3, 86, 43, 0, 540, 539, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 547, 1, 0, 0, 0, 542, 543, 5, 130, 0, 0, 543, 544, 3, 78, 39, 0, 544, 545, 5, 149, 0, 0, 545, 547, 1, 0, 0, 0, 546, 534, 1, 0, 0, 0, 546, 542, 1, 0, 0, 0, 547, 562, 1, 0, 0, 0, 548, 549, 10, 3, 0, 0, 549, 550, 3, 82, 41, 0, 550, 551, 3, 78, 39, 4, 551, 561, 1, 0, 0, 0, 552, 554, 10, 4, 0, 0, 553, 555, 3, 80, 40, 0, 554, 553, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 5, 47, 0, 0, 557, 558, 3, 78, 39, 0, 558, 559, 3, 84, 42, 0, 559, 561, 1, 0, 0, 0, 560, 548, 1, 0, 0, 0, 560, 552, 1, 0, 0, 0, 561, 564, 1, 0, 0, 0, 562, 560, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 79, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 565, 567, 7, 2, 0, 0, 566, 565, 1, 0, 0, 0, 566, 567, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 575, 5, 44, 0, 0, 569, 571, 5, 44, 0, 0, 570, 572, 7, 2, 0, 0, 571, 570, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 575, 1, 0, 0, 0, 573, 575, 7, 2, 0, 0, 574, 566, 1, 0, 0, 0, 574, 569, 1, 0, 0, 0, 574, 573, 1, 0, 0, 0, 575, 609, 1, 0, 0, 0, 576, 
578, 7, 3, 0, 0, 577, 576, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 581, 7, 4, 0, 0, 580, 582, 5, 65, 0, 0, 581, 580, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 591, 1, 0, 0, 0, 583, 585, 7, 4, 0, 0, 584, 586, 5, 65, 0, 0, 585, 584, 1, 0, 0, 0, 585, 586, 1, 0, 0, 0, 586, 588, 1, 0, 0, 0, 587, 589, 7, 3, 0, 0, 588, 587, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 591, 1, 0, 0, 0, 590, 577, 1, 0, 0, 0, 590, 583, 1, 0, 0, 0, 591, 609, 1, 0, 0, 0, 592, 594, 7, 5, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 597, 5, 35, 0, 0, 596, 598, 5, 65, 0, 0, 597, 596, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 607, 1, 0, 0, 0, 599, 601, 5, 35, 0, 0, 600, 602, 5, 65, 0, 0, 601, 600, 1, 0, 0, 0, 601, 602, 1, 0, 0, 0, 602, 604, 1, 0, 0, 0, 603, 605, 7, 5, 0, 0, 604, 603, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 593, 1, 0, 0, 0, 606, 599, 1, 0, 0, 0, 607, 609, 1, 0, 0, 0, 608, 574, 1, 0, 0, 0, 608, 590, 1, 0, 0, 0, 608, 606, 1, 0, 0, 0, 609, 81, 1, 0, 0, 0, 610, 611, 5, 17, 0, 0, 611, 614, 5, 47, 0, 0, 612, 614, 5, 116, 0, 0, 613, 610, 1, 0, 0, 0, 613, 612, 1, 0, 0, 0, 614, 83, 1, 0, 0, 0, 615, 616, 5, 62, 0, 0, 616, 625, 3, 114, 57, 0, 617, 618, 5, 96, 0, 0, 618, 619, 5, 130, 0, 0, 619, 620, 3, 114, 57, 0, 620, 621, 5, 149, 0, 0, 621, 625, 1, 0, 0, 0, 622, 623, 5, 96, 0, 0, 623, 625, 3, 114, 57, 0, 624, 615, 1, 0, 0, 0, 624, 617, 1, 0, 0, 0, 624, 622, 1, 0, 0, 0, 625, 85, 1, 0, 0, 0, 626, 627, 5, 77, 0, 0, 627, 630, 3, 92, 46, 0, 628, 629, 5, 61, 0, 0, 629, 631, 3, 92, 46, 0, 630, 628, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 87, 1, 0, 0, 0, 632, 637, 3, 90, 45, 0, 633, 634, 5, 116, 0, 0, 634, 636, 3, 90, 45, 0, 635, 633, 1, 0, 0, 0, 636, 639, 1, 0, 0, 0, 637, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 89, 1, 0, 0, 0, 639, 637, 1, 0, 0, 0, 640, 642, 3, 116, 58, 0, 641, 643, 7, 6, 0, 0, 642, 641, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 646, 1, 0, 0, 0, 644, 645, 5, 60, 0, 0, 645, 647, 7, 7, 0, 0, 646, 644, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 650, 1, 0, 0, 0, 648, 649, 5, 16, 0, 0, 649, 651, 5, 110, 0, 0, 650, 648, 1, 0, 0, 0, 650, 651, 1, 0, 0, 0, 651, 91, 1, 0, 0, 0, 652, 659, 3, 164, 82, 0, 653, 656, 3, 148, 74, 0, 654, 655, 5, 151, 0, 0, 655, 657, 3, 148, 74, 0, 656, 654, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 659, 1, 0, 0, 0, 658, 652, 1, 0, 0, 0, 658, 653, 1, 0, 0, 0, 659, 93, 1, 0, 0, 0, 660, 665, 3, 96, 48, 0, 661, 662, 5, 116, 0, 0, 662, 664, 3, 96, 48, 0, 663, 661, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 95, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 669, 3, 160, 80, 0, 669, 670, 5, 122, 0, 0, 670, 671, 3, 150, 75, 0, 671, 97, 1, 0, 0, 0, 672, 674, 3, 100, 50, 0, 673, 672, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 676, 1, 0, 0, 0, 675, 677, 3, 102, 51, 0, 676, 675, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 679, 1, 0, 0, 0, 678, 680, 3, 104, 52, 0, 679, 678, 1, 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 99, 1, 0, 0, 0, 681, 682, 5, 67, 0, 0, 682, 683, 5, 11, 0, 0, 683, 684, 3, 114, 57, 0, 684, 101, 1, 0, 0, 0, 685, 686, 5, 64, 0, 0, 686, 687, 5, 11, 0, 0, 687, 688, 3, 88, 44, 0, 688, 103, 1, 0, 0, 0, 689, 690, 7, 8, 0, 0, 690, 691, 3, 106, 53, 0, 691, 105, 1, 0, 0, 0, 692, 699, 3, 108, 54, 0, 693, 694, 5, 9, 0, 0, 694, 695, 3, 108, 54, 0, 695, 696, 5, 2, 0, 0, 696, 697, 3, 108, 54, 0, 697, 699, 1, 0, 0, 0, 698, 692, 1, 0, 0, 0, 698, 693, 1, 0, 0, 0, 699, 107, 1, 0, 0, 0, 700, 701, 5, 19, 0, 0, 701, 713, 5, 75, 0, 0, 702, 703, 5, 94, 0, 0, 703, 713, 5, 68, 0, 0, 704, 705, 5, 94, 0, 0, 705, 713, 5, 32, 0, 0, 706, 707, 
3, 148, 74, 0, 707, 708, 5, 68, 0, 0, 708, 713, 1, 0, 0, 0, 709, 710, 3, 148, 74, 0, 710, 711, 5, 32, 0, 0, 711, 713, 1, 0, 0, 0, 712, 700, 1, 0, 0, 0, 712, 702, 1, 0, 0, 0, 712, 704, 1, 0, 0, 0, 712, 706, 1, 0, 0, 0, 712, 709, 1, 0, 0, 0, 713, 109, 1, 0, 0, 0, 714, 715, 3, 116, 58, 0, 715, 716, 5, 0, 0, 1, 716, 111, 1, 0, 0, 0, 717, 774, 3, 160, 80, 0, 718, 719, 3, 160, 80, 0, 719, 720, 5, 130, 0, 0, 720, 721, 3, 160, 80, 0, 721, 728, 3, 112, 56, 0, 722, 723, 5, 116, 0, 0, 723, 724, 3, 160, 80, 0, 724, 725, 3, 112, 56, 0, 725, 727, 1, 0, 0, 0, 726, 722, 1, 0, 0, 0, 727, 730, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 732, 1, 0, 0, 0, 730, 728, 1, 0, 0, 0, 731, 733, 5, 116, 0, 0, 732, 731, 1, 0, 0, 0, 732, 733, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 735, 5, 149, 0, 0, 735, 774, 1, 0, 0, 0, 736, 737, 3, 160, 80, 0, 737, 738, 5, 130, 0, 0, 738, 743, 3, 162, 81, 0, 739, 740, 5, 116, 0, 0, 740, 742, 3, 162, 81, 0, 741, 739, 1, 0, 0, 0, 742, 745, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 743, 744, 1, 0, 0, 0, 744, 747, 1, 0, 0, 0, 745, 743, 1, 0, 0, 0, 746, 748, 5, 116, 0, 0, 747, 746, 1, 0, 0, 0, 747, 748, 1, 0, 0, 0, 748, 749, 1, 0, 0, 0, 749, 750, 5, 149, 0, 0, 750, 774, 1, 0, 0, 0, 751, 752, 3, 160, 80, 0, 752, 753, 5, 130, 0, 0, 753, 758, 3, 112, 56, 0, 754, 755, 5, 116, 0, 0, 755, 757, 3, 112, 56, 0, 756, 754, 1, 0, 0, 0, 757, 760, 1, 0, 0, 0, 758, 756, 1, 0, 0, 0, 758, 759, 1, 0, 0, 0, 759, 762, 1, 0, 0, 0, 760, 758, 1, 0, 0, 0, 761, 763, 5, 116, 0, 0, 762, 761, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 765, 5, 149, 0, 0, 765, 774, 1, 0, 0, 0, 766, 767, 3, 160, 80, 0, 767, 769, 5, 130, 0, 0, 768, 770, 3, 114, 57, 0, 769, 768, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 772, 5, 149, 0, 0, 772, 774, 1, 0, 0, 0, 773, 717, 1, 0, 0, 0, 773, 718, 1, 0, 0, 0, 773, 736, 1, 0, 0, 0, 773, 751, 1, 0, 0, 0, 773, 766, 1, 0, 0, 0, 774, 113, 1, 0, 0, 0, 775, 780, 3, 116, 58, 0, 776, 777, 5, 116, 0, 0, 777, 779, 3, 116, 58, 0, 778, 776, 1, 0, 0, 0, 779, 782, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 784, 1, 0, 0, 0, 782, 780, 1, 0, 0, 0, 783, 785, 5, 116, 0, 0, 784, 783, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 115, 1, 0, 0, 0, 786, 787, 6, 58, -1, 0, 787, 789, 5, 12, 0, 0, 788, 790, 3, 116, 58, 0, 789, 788, 1, 0, 0, 0, 789, 790, 1, 0, 0, 0, 790, 796, 1, 0, 0, 0, 791, 792, 5, 98, 0, 0, 792, 793, 3, 116, 58, 0, 793, 794, 5, 83, 0, 0, 794, 795, 3, 116, 58, 0, 795, 797, 1, 0, 0, 0, 796, 791, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 802, 1, 0, 0, 0, 800, 801, 5, 25, 0, 0, 801, 803, 3, 116, 58, 0, 802, 800, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 805, 5, 26, 0, 0, 805, 936, 1, 0, 0, 0, 806, 807, 5, 13, 0, 0, 807, 808, 5, 130, 0, 0, 808, 809, 3, 116, 58, 0, 809, 810, 5, 6, 0, 0, 810, 811, 3, 112, 56, 0, 811, 812, 5, 149, 0, 0, 812, 936, 1, 0, 0, 0, 813, 814, 5, 20, 0, 0, 814, 936, 5, 110, 0, 0, 815, 816, 5, 45, 0, 0, 816, 817, 3, 116, 58, 0, 817, 818, 3, 152, 76, 0, 818, 936, 1, 0, 0, 0, 819, 820, 5, 82, 0, 0, 820, 821, 5, 130, 0, 0, 821, 822, 3, 116, 58, 0, 822, 823, 5, 34, 0, 0, 823, 826, 3, 116, 58, 0, 824, 825, 5, 33, 0, 0, 825, 827, 3, 116, 58, 0, 826, 824, 1, 0, 0, 0, 826, 827, 1, 0, 0, 0, 827, 828, 1, 0, 0, 0, 828, 829, 5, 149, 0, 0, 829, 936, 1, 0, 0, 0, 830, 831, 5, 86, 0, 0, 831, 936, 5, 110, 0, 0, 832, 833, 5, 91, 0, 0, 833, 834, 5, 130, 0, 0, 834, 835, 7, 9, 0, 0, 835, 836, 3, 166, 83, 0, 836, 837, 5, 34, 0, 0, 837, 838, 3, 116, 58, 0, 838, 839, 5, 149, 0, 0, 839, 936, 1, 
0, 0, 0, 840, 841, 3, 160, 80, 0, 841, 843, 5, 130, 0, 0, 842, 844, 3, 114, 57, 0, 843, 842, 1, 0, 0, 0, 843, 844, 1, 0, 0, 0, 844, 845, 1, 0, 0, 0, 845, 846, 5, 149, 0, 0, 846, 855, 1, 0, 0, 0, 847, 849, 5, 130, 0, 0, 848, 850, 5, 24, 0, 0, 849, 848, 1, 0, 0, 0, 849, 850, 1, 0, 0, 0, 850, 852, 1, 0, 0, 0, 851, 853, 3, 118, 59, 0, 852, 851, 1, 0, 0, 0, 852, 853, 1, 0, 0, 0, 853, 854, 1, 0, 0, 0, 854, 856, 5, 149, 0, 0, 855, 847, 1, 0, 0, 0, 855, 856, 1, 0, 0, 0, 856, 857, 1, 0, 0, 0, 857, 858, 5, 66, 0, 0, 858, 859, 5, 130, 0, 0, 859, 860, 3, 98, 49, 0, 860, 861, 5, 149, 0, 0, 861, 936, 1, 0, 0, 0, 862, 863, 3, 160, 80, 0, 863, 865, 5, 130, 0, 0, 864, 866, 3, 114, 57, 0, 865, 864, 1, 0, 0, 0, 865, 866, 1, 0, 0, 0, 866, 867, 1, 0, 0, 0, 867, 868, 5, 149, 0, 0, 868, 877, 1, 0, 0, 0, 869, 871, 5, 130, 0, 0, 870, 872, 5, 24, 0, 0, 871, 870, 1, 0, 0, 0, 871, 872, 1, 0, 0, 0, 872, 874, 1, 0, 0, 0, 873, 875, 3, 118, 59, 0, 874, 873, 1, 0, 0, 0, 874, 875, 1, 0, 0, 0, 875, 876, 1, 0, 0, 0, 876, 878, 5, 149, 0, 0, 877, 869, 1, 0, 0, 0, 877, 878, 1, 0, 0, 0, 878, 879, 1, 0, 0, 0, 879, 880, 5, 66, 0, 0, 880, 881, 3, 160, 80, 0, 881, 936, 1, 0, 0, 0, 882, 888, 3, 160, 80, 0, 883, 885, 5, 130, 0, 0, 884, 886, 3, 114, 57, 0, 885, 884, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 887, 1, 0, 0, 0, 887, 889, 5, 149, 0, 0, 888, 883, 1, 0, 0, 0, 888, 889, 1, 0, 0, 0, 889, 890, 1, 0, 0, 0, 890, 892, 5, 130, 0, 0, 891, 893, 5, 24, 0, 0, 892, 891, 1, 0, 0, 0, 892, 893, 1, 0, 0, 0, 893, 895, 1, 0, 0, 0, 894, 896, 3, 118, 59, 0, 895, 894, 1, 0, 0, 0, 895, 896, 1, 0, 0, 0, 896, 897, 1, 0, 0, 0, 897, 898, 5, 149, 0, 0, 898, 936, 1, 0, 0, 0, 899, 936, 3, 124, 62, 0, 900, 936, 3, 168, 84, 0, 901, 936, 3, 150, 75, 0, 902, 903, 5, 118, 0, 0, 903, 936, 3, 116, 58, 19, 904, 905, 5, 58, 0, 0, 905, 936, 3, 116, 58, 13, 906, 907, 3, 140, 70, 0, 907, 908, 5, 120, 0, 0, 908, 910, 1, 0, 0, 0, 909, 906, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 936, 5, 112, 0, 0, 912, 913, 5, 130, 0, 0, 913, 914, 3, 44, 22, 0, 914, 915, 5, 149, 0, 0, 915, 936, 1, 0, 0, 0, 916, 917, 5, 130, 0, 0, 917, 918, 3, 116, 58, 0, 918, 919, 5, 149, 0, 0, 919, 936, 1, 0, 0, 0, 920, 921, 5, 130, 0, 0, 921, 922, 3, 114, 57, 0, 922, 923, 5, 149, 0, 0, 923, 936, 1, 0, 0, 0, 924, 926, 5, 129, 0, 0, 925, 927, 3, 114, 57, 0, 926, 925, 1, 0, 0, 0, 926, 927, 1, 0, 0, 0, 927, 928, 1, 0, 0, 0, 928, 936, 5, 148, 0, 0, 929, 931, 5, 128, 0, 0, 930, 932, 3, 40, 20, 0, 931, 930, 1, 0, 0, 0, 931, 932, 1, 0, 0, 0, 932, 933, 1, 0, 0, 0, 933, 936, 5, 147, 0, 0, 934, 936, 3, 132, 66, 0, 935, 786, 1, 0, 0, 0, 935, 806, 1, 0, 0, 0, 935, 813, 1, 0, 0, 0, 935, 815, 1, 0, 0, 0, 935, 819, 1, 0, 0, 0, 935, 830, 1, 0, 0, 0, 935, 832, 1, 0, 0, 0, 935, 840, 1, 0, 0, 0, 935, 862, 1, 0, 0, 0, 935, 882, 1, 0, 0, 0, 935, 899, 1, 0, 0, 0, 935, 900, 1, 0, 0, 0, 935, 901, 1, 0, 0, 0, 935, 902, 1, 0, 0, 0, 935, 904, 1, 0, 0, 0, 935, 909, 1, 0, 0, 0, 935, 912, 1, 0, 0, 0, 935, 916, 1, 0, 0, 0, 935, 920, 1, 0, 0, 0, 935, 924, 1, 0, 0, 0, 935, 929, 1, 0, 0, 0, 935, 934, 1, 0, 0, 0, 936, 1041, 1, 0, 0, 0, 937, 941, 10, 18, 0, 0, 938, 942, 5, 112, 0, 0, 939, 942, 5, 151, 0, 0, 940, 942, 5, 138, 0, 0, 941, 938, 1, 0, 0, 0, 941, 939, 1, 0, 0, 0, 941, 940, 1, 0, 0, 0, 942, 943, 1, 0, 0, 0, 943, 1040, 3, 116, 58, 19, 944, 948, 10, 17, 0, 0, 945, 949, 5, 139, 0, 0, 946, 949, 5, 118, 0, 0, 947, 949, 5, 117, 0, 0, 948, 945, 1, 0, 0, 0, 948, 946, 1, 0, 0, 0, 948, 947, 1, 0, 0, 0, 949, 950, 1, 0, 0, 0, 950, 1040, 3, 116, 58, 18, 951, 976, 10, 16, 0, 0, 952, 977, 5, 121, 0, 0, 953, 977, 5, 122, 0, 
0, 954, 977, 5, 133, 0, 0, 955, 977, 5, 131, 0, 0, 956, 977, 5, 132, 0, 0, 957, 977, 5, 123, 0, 0, 958, 977, 5, 124, 0, 0, 959, 961, 5, 58, 0, 0, 960, 959, 1, 0, 0, 0, 960, 961, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 964, 5, 42, 0, 0, 963, 965, 5, 15, 0, 0, 964, 963, 1, 0, 0, 0, 964, 965, 1, 0, 0, 0, 965, 977, 1, 0, 0, 0, 966, 968, 5, 58, 0, 0, 967, 966, 1, 0, 0, 0, 967, 968, 1, 0, 0, 0, 968, 969, 1, 0, 0, 0, 969, 977, 7, 10, 0, 0, 970, 977, 5, 145, 0, 0, 971, 977, 5, 146, 0, 0, 972, 977, 5, 135, 0, 0, 973, 977, 5, 126, 0, 0, 974, 977, 5, 127, 0, 0, 975, 977, 5, 134, 0, 0, 976, 952, 1, 0, 0, 0, 976, 953, 1, 0, 0, 0, 976, 954, 1, 0, 0, 0, 976, 955, 1, 0, 0, 0, 976, 956, 1, 0, 0, 0, 976, 957, 1, 0, 0, 0, 976, 958, 1, 0, 0, 0, 976, 960, 1, 0, 0, 0, 976, 967, 1, 0, 0, 0, 976, 970, 1, 0, 0, 0, 976, 971, 1, 0, 0, 0, 976, 972, 1, 0, 0, 0, 976, 973, 1, 0, 0, 0, 976, 974, 1, 0, 0, 0, 976, 975, 1, 0, 0, 0, 977, 978, 1, 0, 0, 0, 978, 1040, 3, 116, 58, 17, 979, 980, 10, 14, 0, 0, 980, 981, 5, 137, 0, 0, 981, 1040, 3, 116, 58, 15, 982, 983, 10, 12, 0, 0, 983, 984, 5, 2, 0, 0, 984, 1040, 3, 116, 58, 13, 985, 986, 10, 11, 0, 0, 986, 987, 5, 63, 0, 0, 987, 1040, 3, 116, 58, 12, 988, 990, 10, 10, 0, 0, 989, 991, 5, 58, 0, 0, 990, 989, 1, 0, 0, 0, 990, 991, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, 993, 5, 9, 0, 0, 993, 994, 3, 116, 58, 0, 994, 995, 5, 2, 0, 0, 995, 996, 3, 116, 58, 11, 996, 1040, 1, 0, 0, 0, 997, 998, 10, 9, 0, 0, 998, 999, 5, 140, 0, 0, 999, 1000, 3, 116, 58, 0, 1000, 1001, 5, 115, 0, 0, 1001, 1002, 3, 116, 58, 9, 1002, 1040, 1, 0, 0, 0, 1003, 1004, 10, 25, 0, 0, 1004, 1005, 5, 129, 0, 0, 1005, 1006, 3, 116, 58, 0, 1006, 1007, 5, 148, 0, 0, 1007, 1040, 1, 0, 0, 0, 1008, 1009, 10, 24, 0, 0, 1009, 1010, 5, 120, 0, 0, 1010, 1040, 5, 108, 0, 0, 1011, 1012, 10, 23, 0, 0, 1012, 1013, 5, 120, 0, 0, 1013, 1040, 3, 160, 80, 0, 1014, 1015, 10, 22, 0, 0, 1015, 1016, 5, 136, 0, 0, 1016, 1017, 5, 129, 0, 0, 1017, 1018, 3, 116, 58, 0, 1018, 1019, 5, 148, 0, 0, 1019, 1040, 1, 0, 0, 0, 1020, 1021, 10, 21, 0, 0, 1021, 1022, 5, 136, 0, 0, 1022, 1040, 5, 108, 0, 0, 1023, 1024, 10, 20, 0, 0, 1024, 1025, 5, 136, 0, 0, 1025, 1040, 3, 160, 80, 0, 1026, 1027, 10, 15, 0, 0, 1027, 1029, 5, 46, 0, 0, 1028, 1030, 5, 58, 0, 0, 1029, 1028, 1, 0, 0, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1040, 5, 59, 0, 0, 1032, 1037, 10, 8, 0, 0, 1033, 1034, 5, 6, 0, 0, 1034, 1038, 3, 160, 80, 0, 1035, 1036, 5, 6, 0, 0, 1036, 1038, 5, 110, 0, 0, 1037, 1033, 1, 0, 0, 0, 1037, 1035, 1, 0, 0, 0, 1038, 1040, 1, 0, 0, 0, 1039, 937, 1, 0, 0, 0, 1039, 944, 1, 0, 0, 0, 1039, 951, 1, 0, 0, 0, 1039, 979, 1, 0, 0, 0, 1039, 982, 1, 0, 0, 0, 1039, 985, 1, 0, 0, 0, 1039, 988, 1, 0, 0, 0, 1039, 997, 1, 0, 0, 0, 1039, 1003, 1, 0, 0, 0, 1039, 1008, 1, 0, 0, 0, 1039, 1011, 1, 0, 0, 0, 1039, 1014, 1, 0, 0, 0, 1039, 1020, 1, 0, 0, 0, 1039, 1023, 1, 0, 0, 0, 1039, 1026, 1, 0, 0, 0, 1039, 1032, 1, 0, 0, 0, 1040, 1043, 1, 0, 0, 0, 1041, 1039, 1, 0, 0, 0, 1041, 1042, 1, 0, 0, 0, 1042, 117, 1, 0, 0, 0, 1043, 1041, 1, 0, 0, 0, 1044, 1049, 3, 120, 60, 0, 1045, 1046, 5, 116, 0, 0, 1046, 1048, 3, 120, 60, 0, 1047, 1045, 1, 0, 0, 0, 1048, 1051, 1, 0, 0, 0, 1049, 1047, 1, 0, 0, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1053, 1, 0, 0, 0, 1051, 1049, 1, 0, 0, 0, 1052, 1054, 5, 116, 0, 0, 1053, 1052, 1, 0, 0, 0, 1053, 1054, 1, 0, 0, 0, 1054, 119, 1, 0, 0, 0, 1055, 1058, 3, 122, 61, 0, 1056, 1058, 3, 116, 58, 0, 1057, 1055, 1, 0, 0, 0, 1057, 1056, 1, 0, 0, 0, 1058, 121, 1, 0, 0, 0, 1059, 1060, 5, 130, 0, 0, 1060, 1065, 3, 160, 80, 0, 1061, 1062, 5, 116, 0, 0, 
1062, 1064, 3, 160, 80, 0, 1063, 1061, 1, 0, 0, 0, 1064, 1067, 1, 0, 0, 0, 1065, 1063, 1, 0, 0, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1069, 1, 0, 0, 0, 1067, 1065, 1, 0, 0, 0, 1068, 1070, 5, 116, 0, 0, 1069, 1068, 1, 0, 0, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 5, 149, 0, 0, 1072, 1085, 1, 0, 0, 0, 1073, 1078, 3, 160, 80, 0, 1074, 1075, 5, 116, 0, 0, 1075, 1077, 3, 160, 80, 0, 1076, 1074, 1, 0, 0, 0, 1077, 1080, 1, 0, 0, 0, 1078, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1082, 1, 0, 0, 0, 1080, 1078, 1, 0, 0, 0, 1081, 1083, 5, 116, 0, 0, 1082, 1081, 1, 0, 0, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1085, 1, 0, 0, 0, 1084, 1059, 1, 0, 0, 0, 1084, 1073, 1, 0, 0, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 5, 111, 0, 0, 1087, 1088, 3, 116, 58, 0, 1088, 123, 1, 0, 0, 0, 1089, 1090, 5, 132, 0, 0, 1090, 1094, 3, 160, 80, 0, 1091, 1093, 3, 126, 63, 0, 1092, 1091, 1, 0, 0, 0, 1093, 1096, 1, 0, 0, 0, 1094, 1092, 1, 0, 0, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1097, 1, 0, 0, 0, 1096, 1094, 1, 0, 0, 0, 1097, 1098, 5, 151, 0, 0, 1098, 1099, 5, 124, 0, 0, 1099, 1122, 1, 0, 0, 0, 1100, 1101, 5, 132, 0, 0, 1101, 1105, 3, 160, 80, 0, 1102, 1104, 3, 126, 63, 0, 1103, 1102, 1, 0, 0, 0, 1104, 1107, 1, 0, 0, 0, 1105, 1103, 1, 0, 0, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1108, 1, 0, 0, 0, 1107, 1105, 1, 0, 0, 0, 1108, 1114, 5, 124, 0, 0, 1109, 1115, 3, 124, 62, 0, 1110, 1111, 5, 128, 0, 0, 1111, 1112, 3, 116, 58, 0, 1112, 1113, 5, 147, 0, 0, 1113, 1115, 1, 0, 0, 0, 1114, 1109, 1, 0, 0, 0, 1114, 1110, 1, 0, 0, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 5, 132, 0, 0, 1117, 1118, 5, 151, 0, 0, 1118, 1119, 3, 160, 80, 0, 1119, 1120, 5, 124, 0, 0, 1120, 1122, 1, 0, 0, 0, 1121, 1089, 1, 0, 0, 0, 1121, 1100, 1, 0, 0, 0, 1122, 125, 1, 0, 0, 0, 1123, 1124, 3, 160, 80, 0, 1124, 1125, 5, 122, 0, 0, 1125, 1126, 3, 166, 83, 0, 1126, 1135, 1, 0, 0, 0, 1127, 1128, 3, 160, 80, 0, 1128, 1129, 5, 122, 0, 0, 1129, 1130, 5, 128, 0, 0, 1130, 1131, 3, 116, 58, 0, 1131, 1132, 5, 147, 0, 0, 1132, 1135, 1, 0, 0, 0, 1133, 1135, 3, 160, 80, 0, 1134, 1123, 1, 0, 0, 0, 1134, 1127, 1, 0, 0, 0, 1134, 1133, 1, 0, 0, 0, 1135, 127, 1, 0, 0, 0, 1136, 1141, 3, 130, 65, 0, 1137, 1138, 5, 116, 0, 0, 1138, 1140, 3, 130, 65, 0, 1139, 1137, 1, 0, 0, 0, 1140, 1143, 1, 0, 0, 0, 1141, 1139, 1, 0, 0, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1145, 1, 0, 0, 0, 1143, 1141, 1, 0, 0, 0, 1144, 1146, 5, 116, 0, 0, 1145, 1144, 1, 0, 0, 0, 1145, 1146, 1, 0, 0, 0, 1146, 129, 1, 0, 0, 0, 1147, 1148, 3, 160, 80, 0, 1148, 1149, 5, 6, 0, 0, 1149, 1150, 5, 130, 0, 0, 1150, 1151, 3, 44, 22, 0, 1151, 1152, 5, 149, 0, 0, 1152, 1158, 1, 0, 0, 0, 1153, 1154, 3, 116, 58, 0, 1154, 1155, 5, 6, 0, 0, 1155, 1156, 3, 160, 80, 0, 1156, 1158, 1, 0, 0, 0, 1157, 1147, 1, 0, 0, 0, 1157, 1153, 1, 0, 0, 0, 1158, 131, 1, 0, 0, 0, 1159, 1167, 3, 164, 82, 0, 1160, 1161, 3, 140, 70, 0, 1161, 1162, 5, 120, 0, 0, 1162, 1164, 1, 0, 0, 0, 1163, 1160, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1167, 3, 134, 67, 0, 1166, 1159, 1, 0, 0, 0, 1166, 1163, 1, 0, 0, 0, 1167, 133, 1, 0, 0, 0, 1168, 1173, 3, 160, 80, 0, 1169, 1170, 5, 120, 0, 0, 1170, 1172, 3, 160, 80, 0, 1171, 1169, 1, 0, 0, 0, 1172, 1175, 1, 0, 0, 0, 1173, 1171, 1, 0, 0, 0, 1173, 1174, 1, 0, 0, 0, 1174, 135, 1, 0, 0, 0, 1175, 1173, 1, 0, 0, 0, 1176, 1177, 6, 68, -1, 0, 1177, 1186, 3, 140, 70, 0, 1178, 1186, 3, 138, 69, 0, 1179, 1180, 5, 130, 0, 0, 1180, 1181, 3, 44, 22, 0, 1181, 1182, 5, 149, 0, 0, 1182, 1186, 1, 0, 0, 0, 1183, 1186, 3, 124, 62, 0, 1184, 1186, 3, 164, 82, 0, 1185, 1176, 1, 0, 0, 0, 
1185, 1178, 1, 0, 0, 0, 1185, 1179, 1, 0, 0, 0, 1185, 1183, 1, 0, 0, 0, 1185, 1184, 1, 0, 0, 0, 1186, 1195, 1, 0, 0, 0, 1187, 1191, 10, 3, 0, 0, 1188, 1192, 3, 158, 79, 0, 1189, 1190, 5, 6, 0, 0, 1190, 1192, 3, 160, 80, 0, 1191, 1188, 1, 0, 0, 0, 1191, 1189, 1, 0, 0, 0, 1192, 1194, 1, 0, 0, 0, 1193, 1187, 1, 0, 0, 0, 1194, 1197, 1, 0, 0, 0, 1195, 1193, 1, 0, 0, 0, 1195, 1196, 1, 0, 0, 0, 1196, 137, 1, 0, 0, 0, 1197, 1195, 1, 0, 0, 0, 1198, 1199, 3, 160, 80, 0, 1199, 1201, 5, 130, 0, 0, 1200, 1202, 3, 142, 71, 0, 1201, 1200, 1, 0, 0, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 5, 149, 0, 0, 1204, 139, 1, 0, 0, 0, 1205, 1206, 3, 144, 72, 0, 1206, 1207, 5, 120, 0, 0, 1207, 1209, 1, 0, 0, 0, 1208, 1205, 1, 0, 0, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 3, 160, 80, 0, 1211, 141, 1, 0, 0, 0, 1212, 1217, 3, 116, 58, 0, 1213, 1214, 5, 116, 0, 0, 1214, 1216, 3, 116, 58, 0, 1215, 1213, 1, 0, 0, 0, 1216, 1219, 1, 0, 0, 0, 1217, 1215, 1, 0, 0, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1221, 1, 0, 0, 0, 1219, 1217, 1, 0, 0, 0, 1220, 1222, 5, 116, 0, 0, 1221, 1220, 1, 0, 0, 0, 1221, 1222, 1, 0, 0, 0, 1222, 143, 1, 0, 0, 0, 1223, 1224, 3, 160, 80, 0, 1224, 145, 1, 0, 0, 0, 1225, 1234, 5, 106, 0, 0, 1226, 1227, 5, 120, 0, 0, 1227, 1234, 7, 11, 0, 0, 1228, 1229, 5, 108, 0, 0, 1229, 1231, 5, 120, 0, 0, 1230, 1232, 7, 11, 0, 0, 1231, 1230, 1, 0, 0, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1234, 1, 0, 0, 0, 1233, 1225, 1, 0, 0, 0, 1233, 1226, 1, 0, 0, 0, 1233, 1228, 1, 0, 0, 0, 1234, 147, 1, 0, 0, 0, 1235, 1237, 7, 12, 0, 0, 1236, 1235, 1, 0, 0, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1244, 1, 0, 0, 0, 1238, 1245, 3, 146, 73, 0, 1239, 1245, 5, 107, 0, 0, 1240, 1245, 5, 108, 0, 0, 1241, 1245, 5, 109, 0, 0, 1242, 1245, 5, 43, 0, 0, 1243, 1245, 5, 57, 0, 0, 1244, 1238, 1, 0, 0, 0, 1244, 1239, 1, 0, 0, 0, 1244, 1240, 1, 0, 0, 0, 1244, 1241, 1, 0, 0, 0, 1244, 1242, 1, 0, 0, 0, 1244, 1243, 1, 0, 0, 0, 1245, 149, 1, 0, 0, 0, 1246, 1250, 3, 148, 74, 0, 1247, 1250, 5, 110, 0, 0, 1248, 1250, 5, 59, 0, 0, 1249, 1246, 1, 0, 0, 0, 1249, 1247, 1, 0, 0, 0, 1249, 1248, 1, 0, 0, 0, 1250, 151, 1, 0, 0, 0, 1251, 1252, 7, 13, 0, 0, 1252, 153, 1, 0, 0, 0, 1253, 1254, 7, 14, 0, 0, 1254, 155, 1, 0, 0, 0, 1255, 1256, 7, 15, 0, 0, 1256, 157, 1, 0, 0, 0, 1257, 1260, 5, 105, 0, 0, 1258, 1260, 3, 156, 78, 0, 1259, 1257, 1, 0, 0, 0, 1259, 1258, 1, 0, 0, 0, 1260, 159, 1, 0, 0, 0, 1261, 1265, 5, 105, 0, 0, 1262, 1265, 3, 152, 76, 0, 1263, 1265, 3, 154, 77, 0, 1264, 1261, 1, 0, 0, 0, 1264, 1262, 1, 0, 0, 0, 1264, 1263, 1, 0, 0, 0, 1265, 161, 1, 0, 0, 0, 1266, 1267, 3, 166, 83, 0, 1267, 1268, 5, 122, 0, 0, 1268, 1269, 3, 148, 74, 0, 1269, 163, 1, 0, 0, 0, 1270, 1271, 5, 128, 0, 0, 1271, 1272, 3, 134, 67, 0, 1272, 1273, 5, 147, 0, 0, 1273, 165, 1, 0, 0, 0, 1274, 1277, 5, 110, 0, 0, 1275, 1277, 3, 168, 84, 0, 1276, 1274, 1, 0, 0, 0, 1276, 1275, 1, 0, 0, 0, 1277, 167, 1, 0, 0, 0, 1278, 1282, 5, 142, 0, 0, 1279, 1281, 3, 170, 85, 0, 1280, 1279, 1, 0, 0, 0, 1281, 1284, 1, 0, 0, 0, 1282, 1280, 1, 0, 0, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1285, 1, 0, 0, 0, 1284, 1282, 1, 0, 0, 0, 1285, 1286, 5, 144, 0, 0, 1286, 169, 1, 0, 0, 0, 1287, 1288, 5, 157, 0, 0, 1288, 1289, 3, 116, 58, 0, 1289, 1290, 5, 147, 0, 0, 1290, 1293, 1, 0, 0, 0, 1291, 1293, 5, 156, 0, 0, 1292, 1287, 1, 0, 0, 0, 1292, 1291, 1, 0, 0, 0, 1293, 171, 1, 0, 0, 0, 1294, 1298, 5, 143, 0, 0, 1295, 1297, 3, 174, 87, 0, 1296, 1295, 1, 0, 0, 0, 1297, 1300, 1, 0, 0, 0, 1298, 1296, 1, 0, 0, 0, 1298, 1299, 1, 0, 0, 0, 1299, 1301, 1, 0, 0, 0, 1300, 1298, 1, 0, 0, 0, 1301, 1302, 5, 0, 
0, 1, 1302, 173, 1, 0, 0, 0, 1303, 1304, 5, 159, 0, 0, 1304, 1305, 3, 116, 58, 0, 1305, 1306, 5, 147, 0, 0, 1306, 1309, 1, 0, 0, 0, 1307, 1309, 5, 158, 0, 0, 1308, 1303, 1, 0, 0, 0, 1308, 1307, 1, 0, 0, 0, 1309, 175, 1, 0, 0, 0, 168, 179, 186, 195, 202, 206, 220, 224, 227, 231, 234, 241, 245, 254, 259, 268, 276, 283, 287, 293, 298, 306, 313, 319, 331, 339, 353, 357, 362, 372, 381, 384, 388, 391, 395, 398, 401, 404, 407, 411, 415, 418, 421, 424, 428, 431, 440, 446, 467, 484, 501, 507, 513, 524, 526, 537, 540, 546, 554, 560, 562, 566, 571, 574, 577, 581, 585, 588, 590, 593, 597, 601, 604, 606, 608, 613, 624, 630, 637, 642, 646, 650, 656, 658, 665, 673, 676, 679, 698, 712, 728, 732, 743, 747, 758, 762, 769, 773, 780, 784, 789, 798, 802, 826, 843, 849, 852, 855, 865, 871, 874, 877, 885, 888, 892, 895, 909, 926, 931, 935, 941, 948, 960, 964, 967, 976, 990, 1029, 1037, 1039, 1041, 1049, 1053, 1057, 1065, 1069, 1078, 1082, 1084, 1094, 1105, 1114, 1121, 1134, 1141, 1145, 1157, 1163, 1166, 1173, 1185, 1191, 1195, 1201, 1208, 1217, 1221, 1231, 1233, 1236, 1244, 1249, 1259, 1264, 1276, 1282, 1292, 1298, 1308] \ No newline at end of file +[4, 1, 159, 1303, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 1, 0, 5, 0, 174, 8, 0, 10, 0, 12, 0, 177, 9, 0, 1, 0, 1, 0, 1, 1, 1, 1, 3, 1, 183, 8, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 192, 8, 3, 1, 4, 1, 4, 1, 4, 5, 4, 197, 8, 4, 10, 4, 12, 4, 200, 9, 4, 1, 4, 3, 4, 203, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 217, 8, 5, 1, 6, 1, 6, 3, 6, 221, 8, 6, 1, 6, 3, 6, 224, 8, 6, 1, 7, 1, 7, 3, 7, 228, 8, 7, 1, 7, 3, 7, 231, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 238, 8, 8, 1, 8, 1, 8, 3, 8, 242, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 249, 8, 9, 10, 9, 12, 9, 252, 9, 9, 1, 9, 1, 9, 3, 9, 256, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 265, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 273, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 280, 8, 12, 1, 12, 1, 12, 3, 12, 284, 8, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 290, 8, 12, 1, 12, 1, 12, 1, 12, 3, 12, 295, 8, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 303, 8, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 310, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 316, 8, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 3, 16, 328, 8, 16, 1, 17, 1, 17, 1, 18, 1, 18, 5, 18, 334, 8, 18, 10, 
18, 12, 18, 337, 9, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 5, 20, 348, 8, 20, 10, 20, 12, 20, 351, 9, 20, 1, 20, 3, 20, 354, 8, 20, 1, 21, 1, 21, 1, 21, 3, 21, 359, 8, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 367, 8, 22, 10, 22, 12, 22, 370, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 378, 8, 23, 1, 24, 3, 24, 381, 8, 24, 1, 24, 1, 24, 3, 24, 385, 8, 24, 1, 24, 3, 24, 388, 8, 24, 1, 24, 1, 24, 3, 24, 392, 8, 24, 1, 24, 3, 24, 395, 8, 24, 1, 24, 3, 24, 398, 8, 24, 1, 24, 3, 24, 401, 8, 24, 1, 24, 3, 24, 404, 8, 24, 1, 24, 1, 24, 3, 24, 408, 8, 24, 1, 24, 1, 24, 3, 24, 412, 8, 24, 1, 24, 3, 24, 415, 8, 24, 1, 24, 3, 24, 418, 8, 24, 1, 24, 3, 24, 421, 8, 24, 1, 24, 1, 24, 3, 24, 425, 8, 24, 1, 24, 3, 24, 428, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 437, 8, 26, 1, 27, 1, 27, 1, 27, 1, 28, 3, 28, 443, 8, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 462, 8, 29, 10, 29, 12, 29, 465, 9, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 481, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 498, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 504, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 510, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 521, 8, 36, 3, 36, 523, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 534, 8, 39, 1, 39, 3, 39, 537, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 543, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 551, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 557, 8, 39, 10, 39, 12, 39, 560, 9, 39, 1, 40, 3, 40, 563, 8, 40, 1, 40, 1, 40, 1, 40, 3, 40, 568, 8, 40, 1, 40, 3, 40, 571, 8, 40, 1, 40, 3, 40, 574, 8, 40, 1, 40, 1, 40, 3, 40, 578, 8, 40, 1, 40, 1, 40, 3, 40, 582, 8, 40, 1, 40, 3, 40, 585, 8, 40, 3, 40, 587, 8, 40, 1, 40, 3, 40, 590, 8, 40, 1, 40, 1, 40, 3, 40, 594, 8, 40, 1, 40, 1, 40, 3, 40, 598, 8, 40, 1, 40, 3, 40, 601, 8, 40, 3, 40, 603, 8, 40, 3, 40, 605, 8, 40, 1, 41, 1, 41, 1, 41, 3, 41, 610, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 621, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 3, 43, 627, 8, 43, 1, 44, 1, 44, 1, 44, 5, 44, 632, 8, 44, 10, 44, 12, 44, 635, 9, 44, 1, 45, 1, 45, 3, 45, 639, 8, 45, 1, 45, 1, 45, 3, 45, 643, 8, 45, 1, 45, 1, 45, 3, 45, 647, 8, 45, 1, 46, 1, 46, 1, 46, 1, 46, 3, 46, 653, 8, 46, 3, 46, 655, 8, 46, 1, 47, 1, 47, 1, 47, 5, 47, 660, 8, 47, 10, 47, 12, 47, 663, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 3, 49, 670, 8, 49, 1, 49, 3, 49, 673, 8, 49, 1, 49, 3, 49, 676, 8, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 695, 8, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 3, 54, 709, 8, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 723, 8, 56, 10, 56, 12, 56, 726, 9, 56, 1, 56, 3, 56, 729, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 738, 8, 56, 10, 56, 12, 56, 741, 9, 56, 1, 56, 3, 56, 744, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 753, 8, 56, 10, 56, 12, 56, 756, 9, 56, 1, 56, 3, 56, 759, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 766, 8, 56, 1, 56, 1, 56, 3, 56, 770, 8, 56, 1, 57, 1, 57, 1, 57, 5, 57, 775, 8, 57, 10, 57, 12, 57, 778, 9, 57, 1, 57, 3, 57, 
781, 8, 57, 1, 58, 1, 58, 1, 58, 3, 58, 786, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 4, 58, 793, 8, 58, 11, 58, 12, 58, 794, 1, 58, 1, 58, 3, 58, 799, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 823, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 840, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 846, 8, 58, 1, 58, 3, 58, 849, 8, 58, 1, 58, 3, 58, 852, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 862, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 868, 8, 58, 1, 58, 3, 58, 871, 8, 58, 1, 58, 3, 58, 874, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 882, 8, 58, 1, 58, 3, 58, 885, 8, 58, 1, 58, 1, 58, 3, 58, 889, 8, 58, 1, 58, 3, 58, 892, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 906, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 923, 8, 58, 1, 58, 1, 58, 1, 58, 3, 58, 928, 8, 58, 1, 58, 1, 58, 1, 58, 3, 58, 933, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 939, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 946, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 958, 8, 58, 1, 58, 1, 58, 3, 58, 962, 8, 58, 1, 58, 3, 58, 965, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 974, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 988, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1004, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1033, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1041, 8, 58, 5, 58, 1043, 8, 58, 10, 58, 12, 58, 1046, 9, 58, 1, 59, 1, 59, 1, 59, 1, 59, 5, 59, 1052, 8, 59, 10, 59, 12, 59, 1055, 9, 59, 1, 59, 3, 59, 1058, 8, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 5, 59, 1065, 8, 59, 10, 59, 12, 59, 1068, 9, 59, 1, 59, 3, 59, 1071, 8, 59, 1, 59, 1, 59, 3, 59, 1075, 8, 59, 1, 59, 1, 59, 1, 59, 3, 59, 1080, 8, 59, 1, 60, 1, 60, 1, 60, 5, 60, 1085, 8, 60, 10, 60, 12, 60, 1088, 9, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 5, 60, 1096, 8, 60, 10, 60, 12, 60, 1099, 9, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 3, 60, 1107, 8, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 3, 60, 1114, 8, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 1127, 8, 61, 1, 62, 1, 62, 1, 62, 5, 62, 1132, 8, 62, 10, 62, 12, 62, 1135, 9, 62, 1, 62, 3, 62, 1138, 8, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 3, 63, 1150, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 3, 64, 1156, 8, 64, 1, 64, 3, 64, 1159, 8, 64, 1, 65, 1, 65, 1, 65, 5, 65, 1164, 8, 65, 10, 65, 12, 65, 1167, 9, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 1178, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 1184, 8, 66, 5, 66, 1186, 8, 66, 10, 66, 12, 66, 1189, 9, 66, 1, 67, 1, 67, 1, 67, 3, 67, 1194, 8, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 3, 68, 1201, 8, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 5, 69, 1208, 8, 69, 10, 69, 12, 69, 1211, 9, 69, 1, 69, 3, 69, 1214, 8, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 3, 71, 1224, 8, 71, 3, 71, 1226, 8, 71, 1, 72, 3, 72, 1229, 8, 72, 1, 72, 
1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 3, 72, 1237, 8, 72, 1, 73, 1, 73, 1, 73, 3, 73, 1242, 8, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 3, 77, 1252, 8, 77, 1, 78, 1, 78, 1, 78, 3, 78, 1257, 8, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 3, 81, 1269, 8, 81, 1, 82, 1, 82, 5, 82, 1273, 8, 82, 10, 82, 12, 82, 1276, 9, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 3, 83, 1285, 8, 83, 1, 84, 1, 84, 5, 84, 1289, 8, 84, 10, 84, 12, 84, 1292, 9, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 3, 85, 1301, 8, 85, 1, 85, 0, 3, 78, 116, 132, 86, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 0, 16, 2, 0, 18, 18, 74, 74, 2, 0, 44, 44, 51, 51, 3, 0, 1, 1, 4, 4, 8, 8, 4, 0, 1, 1, 3, 4, 8, 8, 80, 80, 2, 0, 51, 51, 73, 73, 2, 0, 1, 1, 4, 4, 2, 0, 7, 7, 22, 23, 2, 0, 30, 30, 49, 49, 2, 0, 71, 71, 76, 76, 3, 0, 10, 10, 50, 50, 90, 90, 2, 0, 41, 41, 53, 53, 1, 0, 107, 108, 2, 0, 118, 118, 139, 139, 7, 0, 21, 21, 38, 38, 55, 56, 70, 70, 78, 78, 97, 97, 103, 103, 16, 0, 1, 13, 15, 20, 22, 28, 30, 30, 32, 37, 39, 42, 44, 51, 53, 54, 58, 58, 60, 69, 71, 77, 79, 83, 85, 92, 94, 96, 98, 99, 101, 102, 4, 0, 20, 20, 30, 30, 39, 39, 48, 48, 1475, 0, 175, 1, 0, 0, 0, 2, 182, 1, 0, 0, 0, 4, 184, 1, 0, 0, 0, 6, 186, 1, 0, 0, 0, 8, 193, 1, 0, 0, 0, 10, 216, 1, 0, 0, 0, 12, 218, 1, 0, 0, 0, 14, 225, 1, 0, 0, 0, 16, 232, 1, 0, 0, 0, 18, 245, 1, 0, 0, 0, 20, 257, 1, 0, 0, 0, 22, 266, 1, 0, 0, 0, 24, 274, 1, 0, 0, 0, 26, 296, 1, 0, 0, 0, 28, 311, 1, 0, 0, 0, 30, 320, 1, 0, 0, 0, 32, 325, 1, 0, 0, 0, 34, 329, 1, 0, 0, 0, 36, 331, 1, 0, 0, 0, 38, 340, 1, 0, 0, 0, 40, 344, 1, 0, 0, 0, 42, 358, 1, 0, 0, 0, 44, 362, 1, 0, 0, 0, 46, 377, 1, 0, 0, 0, 48, 380, 1, 0, 0, 0, 50, 429, 1, 0, 0, 0, 52, 432, 1, 0, 0, 0, 54, 438, 1, 0, 0, 0, 56, 442, 1, 0, 0, 0, 58, 448, 1, 0, 0, 0, 60, 466, 1, 0, 0, 0, 62, 469, 1, 0, 0, 0, 64, 472, 1, 0, 0, 0, 66, 482, 1, 0, 0, 0, 68, 485, 1, 0, 0, 0, 70, 489, 1, 0, 0, 0, 72, 522, 1, 0, 0, 0, 74, 524, 1, 0, 0, 0, 76, 527, 1, 0, 0, 0, 78, 542, 1, 0, 0, 0, 80, 604, 1, 0, 0, 0, 82, 609, 1, 0, 0, 0, 84, 620, 1, 0, 0, 0, 86, 622, 1, 0, 0, 0, 88, 628, 1, 0, 0, 0, 90, 636, 1, 0, 0, 0, 92, 654, 1, 0, 0, 0, 94, 656, 1, 0, 0, 0, 96, 664, 1, 0, 0, 0, 98, 669, 1, 0, 0, 0, 100, 677, 1, 0, 0, 0, 102, 681, 1, 0, 0, 0, 104, 685, 1, 0, 0, 0, 106, 694, 1, 0, 0, 0, 108, 708, 1, 0, 0, 0, 110, 710, 1, 0, 0, 0, 112, 769, 1, 0, 0, 0, 114, 771, 1, 0, 0, 0, 116, 932, 1, 0, 0, 0, 118, 1074, 1, 0, 0, 0, 120, 1113, 1, 0, 0, 0, 122, 1126, 1, 0, 0, 0, 124, 1128, 1, 0, 0, 0, 126, 1149, 1, 0, 0, 0, 128, 1158, 1, 0, 0, 0, 130, 1160, 1, 0, 0, 0, 132, 1177, 1, 0, 0, 0, 134, 1190, 1, 0, 0, 0, 136, 1200, 1, 0, 0, 0, 138, 1204, 1, 0, 0, 0, 140, 1215, 1, 0, 0, 0, 142, 1225, 1, 0, 0, 0, 144, 1228, 1, 0, 0, 0, 146, 1241, 1, 0, 0, 0, 148, 1243, 1, 0, 0, 0, 150, 1245, 1, 0, 0, 0, 152, 1247, 1, 0, 0, 0, 154, 1251, 1, 0, 0, 0, 156, 1256, 1, 0, 0, 0, 158, 1258, 1, 0, 0, 0, 160, 1262, 1, 0, 0, 0, 162, 1268, 1, 0, 0, 0, 164, 1270, 1, 0, 0, 0, 166, 1284, 1, 0, 0, 0, 168, 1286, 1, 0, 0, 0, 170, 1300, 1, 0, 0, 0, 172, 174, 3, 2, 1, 0, 173, 172, 1, 0, 0, 0, 174, 177, 1, 0, 0, 0, 175, 173, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 178, 1, 0, 0, 0, 177, 175, 1, 0, 0, 0, 178, 179, 5, 
0, 0, 1, 179, 1, 1, 0, 0, 0, 180, 183, 3, 6, 3, 0, 181, 183, 3, 10, 5, 0, 182, 180, 1, 0, 0, 0, 182, 181, 1, 0, 0, 0, 183, 3, 1, 0, 0, 0, 184, 185, 3, 116, 58, 0, 185, 5, 1, 0, 0, 0, 186, 187, 5, 52, 0, 0, 187, 191, 3, 156, 78, 0, 188, 189, 5, 115, 0, 0, 189, 190, 5, 122, 0, 0, 190, 192, 3, 4, 2, 0, 191, 188, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 7, 1, 0, 0, 0, 193, 198, 3, 156, 78, 0, 194, 195, 5, 116, 0, 0, 195, 197, 3, 156, 78, 0, 196, 194, 1, 0, 0, 0, 197, 200, 1, 0, 0, 0, 198, 196, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 202, 1, 0, 0, 0, 200, 198, 1, 0, 0, 0, 201, 203, 5, 116, 0, 0, 202, 201, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 9, 1, 0, 0, 0, 204, 217, 3, 12, 6, 0, 205, 217, 3, 14, 7, 0, 206, 217, 3, 18, 9, 0, 207, 217, 3, 20, 10, 0, 208, 217, 3, 22, 11, 0, 209, 217, 3, 26, 13, 0, 210, 217, 3, 24, 12, 0, 211, 217, 3, 28, 14, 0, 212, 217, 3, 30, 15, 0, 213, 217, 3, 36, 18, 0, 214, 217, 3, 32, 16, 0, 215, 217, 3, 34, 17, 0, 216, 204, 1, 0, 0, 0, 216, 205, 1, 0, 0, 0, 216, 206, 1, 0, 0, 0, 216, 207, 1, 0, 0, 0, 216, 208, 1, 0, 0, 0, 216, 209, 1, 0, 0, 0, 216, 210, 1, 0, 0, 0, 216, 211, 1, 0, 0, 0, 216, 212, 1, 0, 0, 0, 216, 213, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 215, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 220, 5, 72, 0, 0, 219, 221, 3, 4, 2, 0, 220, 219, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 223, 1, 0, 0, 0, 222, 224, 5, 150, 0, 0, 223, 222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 13, 1, 0, 0, 0, 225, 227, 5, 84, 0, 0, 226, 228, 3, 4, 2, 0, 227, 226, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 230, 1, 0, 0, 0, 229, 231, 5, 150, 0, 0, 230, 229, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 15, 1, 0, 0, 0, 232, 241, 5, 14, 0, 0, 233, 234, 5, 130, 0, 0, 234, 237, 3, 156, 78, 0, 235, 236, 5, 115, 0, 0, 236, 238, 3, 156, 78, 0, 237, 235, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 5, 149, 0, 0, 240, 242, 1, 0, 0, 0, 241, 233, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 3, 36, 18, 0, 244, 17, 1, 0, 0, 0, 245, 246, 5, 93, 0, 0, 246, 250, 3, 36, 18, 0, 247, 249, 3, 16, 8, 0, 248, 247, 1, 0, 0, 0, 249, 252, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 255, 1, 0, 0, 0, 252, 250, 1, 0, 0, 0, 253, 254, 5, 29, 0, 0, 254, 256, 3, 36, 18, 0, 255, 253, 1, 0, 0, 0, 255, 256, 1, 0, 0, 0, 256, 19, 1, 0, 0, 0, 257, 258, 5, 40, 0, 0, 258, 259, 5, 130, 0, 0, 259, 260, 3, 4, 2, 0, 260, 261, 5, 149, 0, 0, 261, 264, 3, 10, 5, 0, 262, 263, 5, 25, 0, 0, 263, 265, 3, 10, 5, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 21, 1, 0, 0, 0, 266, 267, 5, 100, 0, 0, 267, 268, 5, 130, 0, 0, 268, 269, 3, 4, 2, 0, 269, 270, 5, 149, 0, 0, 270, 272, 3, 10, 5, 0, 271, 273, 5, 150, 0, 0, 272, 271, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 23, 1, 0, 0, 0, 274, 275, 5, 33, 0, 0, 275, 279, 5, 130, 0, 0, 276, 280, 3, 6, 3, 0, 277, 280, 3, 30, 15, 0, 278, 280, 3, 4, 2, 0, 279, 276, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 278, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 283, 5, 150, 0, 0, 282, 284, 3, 4, 2, 0, 283, 282, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 289, 5, 150, 0, 0, 286, 290, 3, 6, 3, 0, 287, 290, 3, 30, 15, 0, 288, 290, 3, 4, 2, 0, 289, 286, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 288, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 292, 5, 149, 0, 0, 292, 294, 3, 10, 5, 0, 293, 295, 5, 150, 0, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 25, 1, 0, 0, 0, 296, 297, 5, 33, 0, 0, 297, 298, 5, 130, 0, 0, 298, 299, 5, 52, 0, 0, 299, 302, 3, 156, 78, 0, 300, 301, 5, 116, 0, 0, 301, 303, 3, 156, 78, 0, 302, 300, 1, 0, 
0, 0, 302, 303, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 5, 42, 0, 0, 305, 306, 3, 4, 2, 0, 306, 307, 5, 149, 0, 0, 307, 309, 3, 10, 5, 0, 308, 310, 5, 150, 0, 0, 309, 308, 1, 0, 0, 0, 309, 310, 1, 0, 0, 0, 310, 27, 1, 0, 0, 0, 311, 312, 5, 31, 0, 0, 312, 313, 3, 156, 78, 0, 313, 315, 5, 130, 0, 0, 314, 316, 3, 8, 4, 0, 315, 314, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 1, 0, 0, 0, 317, 318, 5, 149, 0, 0, 318, 319, 3, 36, 18, 0, 319, 29, 1, 0, 0, 0, 320, 321, 3, 4, 2, 0, 321, 322, 5, 115, 0, 0, 322, 323, 5, 122, 0, 0, 323, 324, 3, 4, 2, 0, 324, 31, 1, 0, 0, 0, 325, 327, 3, 4, 2, 0, 326, 328, 5, 150, 0, 0, 327, 326, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 33, 1, 0, 0, 0, 329, 330, 5, 150, 0, 0, 330, 35, 1, 0, 0, 0, 331, 335, 5, 128, 0, 0, 332, 334, 3, 2, 1, 0, 333, 332, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 333, 1, 0, 0, 0, 335, 336, 1, 0, 0, 0, 336, 338, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 338, 339, 5, 147, 0, 0, 339, 37, 1, 0, 0, 0, 340, 341, 3, 4, 2, 0, 341, 342, 5, 115, 0, 0, 342, 343, 3, 4, 2, 0, 343, 39, 1, 0, 0, 0, 344, 349, 3, 38, 19, 0, 345, 346, 5, 116, 0, 0, 346, 348, 3, 38, 19, 0, 347, 345, 1, 0, 0, 0, 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 349, 350, 1, 0, 0, 0, 350, 353, 1, 0, 0, 0, 351, 349, 1, 0, 0, 0, 352, 354, 5, 116, 0, 0, 353, 352, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 41, 1, 0, 0, 0, 355, 359, 3, 44, 22, 0, 356, 359, 3, 48, 24, 0, 357, 359, 3, 120, 60, 0, 358, 355, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 357, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 361, 5, 0, 0, 1, 361, 43, 1, 0, 0, 0, 362, 368, 3, 46, 23, 0, 363, 364, 5, 95, 0, 0, 364, 365, 5, 1, 0, 0, 365, 367, 3, 46, 23, 0, 366, 363, 1, 0, 0, 0, 367, 370, 1, 0, 0, 0, 368, 366, 1, 0, 0, 0, 368, 369, 1, 0, 0, 0, 369, 45, 1, 0, 0, 0, 370, 368, 1, 0, 0, 0, 371, 378, 3, 48, 24, 0, 372, 373, 5, 130, 0, 0, 373, 374, 3, 44, 22, 0, 374, 375, 5, 149, 0, 0, 375, 378, 1, 0, 0, 0, 376, 378, 3, 160, 80, 0, 377, 371, 1, 0, 0, 0, 377, 372, 1, 0, 0, 0, 377, 376, 1, 0, 0, 0, 378, 47, 1, 0, 0, 0, 379, 381, 3, 50, 25, 0, 380, 379, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 382, 1, 0, 0, 0, 382, 384, 5, 79, 0, 0, 383, 385, 5, 24, 0, 0, 384, 383, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 387, 1, 0, 0, 0, 386, 388, 3, 52, 26, 0, 387, 386, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 391, 3, 114, 57, 0, 390, 392, 3, 54, 27, 0, 391, 390, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 395, 3, 56, 28, 0, 394, 393, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 397, 1, 0, 0, 0, 396, 398, 3, 60, 30, 0, 397, 396, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 400, 1, 0, 0, 0, 399, 401, 3, 62, 31, 0, 400, 399, 1, 0, 0, 0, 400, 401, 1, 0, 0, 0, 401, 403, 1, 0, 0, 0, 402, 404, 3, 64, 32, 0, 403, 402, 1, 0, 0, 0, 403, 404, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 406, 5, 102, 0, 0, 406, 408, 7, 0, 0, 0, 407, 405, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 410, 5, 102, 0, 0, 410, 412, 5, 89, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 414, 1, 0, 0, 0, 413, 415, 3, 66, 33, 0, 414, 413, 1, 0, 0, 0, 414, 415, 1, 0, 0, 0, 415, 417, 1, 0, 0, 0, 416, 418, 3, 58, 29, 0, 417, 416, 1, 0, 0, 0, 417, 418, 1, 0, 0, 0, 418, 420, 1, 0, 0, 0, 419, 421, 3, 68, 34, 0, 420, 419, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 424, 1, 0, 0, 0, 422, 425, 3, 72, 36, 0, 423, 425, 3, 74, 37, 0, 424, 422, 1, 0, 0, 0, 424, 423, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 427, 1, 0, 0, 0, 426, 428, 3, 76, 38, 0, 427, 426, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 49, 1, 0, 0, 0, 429, 430, 5, 102, 0, 0, 430, 431, 3, 124, 62, 0, 431, 51, 1, 0, 0, 0, 432, 
433, 5, 88, 0, 0, 433, 436, 5, 108, 0, 0, 434, 435, 5, 102, 0, 0, 435, 437, 5, 85, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 53, 1, 0, 0, 0, 438, 439, 5, 34, 0, 0, 439, 440, 3, 78, 39, 0, 440, 55, 1, 0, 0, 0, 441, 443, 7, 1, 0, 0, 442, 441, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 5, 5, 0, 0, 445, 446, 5, 47, 0, 0, 446, 447, 3, 114, 57, 0, 447, 57, 1, 0, 0, 0, 448, 449, 5, 101, 0, 0, 449, 450, 3, 156, 78, 0, 450, 451, 5, 6, 0, 0, 451, 452, 5, 130, 0, 0, 452, 453, 3, 98, 49, 0, 453, 463, 5, 149, 0, 0, 454, 455, 5, 116, 0, 0, 455, 456, 3, 156, 78, 0, 456, 457, 5, 6, 0, 0, 457, 458, 5, 130, 0, 0, 458, 459, 3, 98, 49, 0, 459, 460, 5, 149, 0, 0, 460, 462, 1, 0, 0, 0, 461, 454, 1, 0, 0, 0, 462, 465, 1, 0, 0, 0, 463, 461, 1, 0, 0, 0, 463, 464, 1, 0, 0, 0, 464, 59, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 466, 467, 5, 69, 0, 0, 467, 468, 3, 116, 58, 0, 468, 61, 1, 0, 0, 0, 469, 470, 5, 99, 0, 0, 470, 471, 3, 116, 58, 0, 471, 63, 1, 0, 0, 0, 472, 473, 5, 36, 0, 0, 473, 480, 5, 11, 0, 0, 474, 475, 7, 0, 0, 0, 475, 476, 5, 130, 0, 0, 476, 477, 3, 114, 57, 0, 477, 478, 5, 149, 0, 0, 478, 481, 1, 0, 0, 0, 479, 481, 3, 114, 57, 0, 480, 474, 1, 0, 0, 0, 480, 479, 1, 0, 0, 0, 481, 65, 1, 0, 0, 0, 482, 483, 5, 37, 0, 0, 483, 484, 3, 116, 58, 0, 484, 67, 1, 0, 0, 0, 485, 486, 5, 64, 0, 0, 486, 487, 5, 11, 0, 0, 487, 488, 3, 88, 44, 0, 488, 69, 1, 0, 0, 0, 489, 490, 5, 64, 0, 0, 490, 491, 5, 11, 0, 0, 491, 492, 3, 114, 57, 0, 492, 71, 1, 0, 0, 0, 493, 494, 5, 54, 0, 0, 494, 497, 3, 116, 58, 0, 495, 496, 5, 116, 0, 0, 496, 498, 3, 116, 58, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 503, 1, 0, 0, 0, 499, 500, 5, 102, 0, 0, 500, 504, 5, 85, 0, 0, 501, 502, 5, 11, 0, 0, 502, 504, 3, 114, 57, 0, 503, 499, 1, 0, 0, 0, 503, 501, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 523, 1, 0, 0, 0, 505, 506, 5, 54, 0, 0, 506, 509, 3, 116, 58, 0, 507, 508, 5, 102, 0, 0, 508, 510, 5, 85, 0, 0, 509, 507, 1, 0, 0, 0, 509, 510, 1, 0, 0, 0, 510, 511, 1, 0, 0, 0, 511, 512, 5, 61, 0, 0, 512, 513, 3, 116, 58, 0, 513, 523, 1, 0, 0, 0, 514, 515, 5, 54, 0, 0, 515, 516, 3, 116, 58, 0, 516, 517, 5, 61, 0, 0, 517, 520, 3, 116, 58, 0, 518, 519, 5, 11, 0, 0, 519, 521, 3, 114, 57, 0, 520, 518, 1, 0, 0, 0, 520, 521, 1, 0, 0, 0, 521, 523, 1, 0, 0, 0, 522, 493, 1, 0, 0, 0, 522, 505, 1, 0, 0, 0, 522, 514, 1, 0, 0, 0, 523, 73, 1, 0, 0, 0, 524, 525, 5, 61, 0, 0, 525, 526, 3, 116, 58, 0, 526, 75, 1, 0, 0, 0, 527, 528, 5, 81, 0, 0, 528, 529, 3, 94, 47, 0, 529, 77, 1, 0, 0, 0, 530, 531, 6, 39, -1, 0, 531, 533, 3, 132, 66, 0, 532, 534, 5, 28, 0, 0, 533, 532, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 536, 1, 0, 0, 0, 535, 537, 3, 86, 43, 0, 536, 535, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, 537, 543, 1, 0, 0, 0, 538, 539, 5, 130, 0, 0, 539, 540, 3, 78, 39, 0, 540, 541, 5, 149, 0, 0, 541, 543, 1, 0, 0, 0, 542, 530, 1, 0, 0, 0, 542, 538, 1, 0, 0, 0, 543, 558, 1, 0, 0, 0, 544, 545, 10, 3, 0, 0, 545, 546, 3, 82, 41, 0, 546, 547, 3, 78, 39, 4, 547, 557, 1, 0, 0, 0, 548, 550, 10, 4, 0, 0, 549, 551, 3, 80, 40, 0, 550, 549, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 5, 47, 0, 0, 553, 554, 3, 78, 39, 0, 554, 555, 3, 84, 42, 0, 555, 557, 1, 0, 0, 0, 556, 544, 1, 0, 0, 0, 556, 548, 1, 0, 0, 0, 557, 560, 1, 0, 0, 0, 558, 556, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 79, 1, 0, 0, 0, 560, 558, 1, 0, 0, 0, 561, 563, 7, 2, 0, 0, 562, 561, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 571, 5, 44, 0, 0, 565, 567, 5, 44, 0, 0, 566, 568, 7, 2, 0, 0, 567, 566, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 571, 1, 0, 0, 
0, 569, 571, 7, 2, 0, 0, 570, 562, 1, 0, 0, 0, 570, 565, 1, 0, 0, 0, 570, 569, 1, 0, 0, 0, 571, 605, 1, 0, 0, 0, 572, 574, 7, 3, 0, 0, 573, 572, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 577, 7, 4, 0, 0, 576, 578, 5, 65, 0, 0, 577, 576, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 587, 1, 0, 0, 0, 579, 581, 7, 4, 0, 0, 580, 582, 5, 65, 0, 0, 581, 580, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 584, 1, 0, 0, 0, 583, 585, 7, 3, 0, 0, 584, 583, 1, 0, 0, 0, 584, 585, 1, 0, 0, 0, 585, 587, 1, 0, 0, 0, 586, 573, 1, 0, 0, 0, 586, 579, 1, 0, 0, 0, 587, 605, 1, 0, 0, 0, 588, 590, 7, 5, 0, 0, 589, 588, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 1, 0, 0, 0, 591, 593, 5, 35, 0, 0, 592, 594, 5, 65, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 603, 1, 0, 0, 0, 595, 597, 5, 35, 0, 0, 596, 598, 5, 65, 0, 0, 597, 596, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 600, 1, 0, 0, 0, 599, 601, 7, 5, 0, 0, 600, 599, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 603, 1, 0, 0, 0, 602, 589, 1, 0, 0, 0, 602, 595, 1, 0, 0, 0, 603, 605, 1, 0, 0, 0, 604, 570, 1, 0, 0, 0, 604, 586, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 81, 1, 0, 0, 0, 606, 607, 5, 17, 0, 0, 607, 610, 5, 47, 0, 0, 608, 610, 5, 116, 0, 0, 609, 606, 1, 0, 0, 0, 609, 608, 1, 0, 0, 0, 610, 83, 1, 0, 0, 0, 611, 612, 5, 62, 0, 0, 612, 621, 3, 114, 57, 0, 613, 614, 5, 96, 0, 0, 614, 615, 5, 130, 0, 0, 615, 616, 3, 114, 57, 0, 616, 617, 5, 149, 0, 0, 617, 621, 1, 0, 0, 0, 618, 619, 5, 96, 0, 0, 619, 621, 3, 114, 57, 0, 620, 611, 1, 0, 0, 0, 620, 613, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 621, 85, 1, 0, 0, 0, 622, 623, 5, 77, 0, 0, 623, 626, 3, 92, 46, 0, 624, 625, 5, 61, 0, 0, 625, 627, 3, 92, 46, 0, 626, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 87, 1, 0, 0, 0, 628, 633, 3, 90, 45, 0, 629, 630, 5, 116, 0, 0, 630, 632, 3, 90, 45, 0, 631, 629, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 89, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 636, 638, 3, 116, 58, 0, 637, 639, 7, 6, 0, 0, 638, 637, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 642, 1, 0, 0, 0, 640, 641, 5, 60, 0, 0, 641, 643, 7, 7, 0, 0, 642, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 646, 1, 0, 0, 0, 644, 645, 5, 16, 0, 0, 645, 647, 5, 110, 0, 0, 646, 644, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 91, 1, 0, 0, 0, 648, 655, 3, 160, 80, 0, 649, 652, 3, 144, 72, 0, 650, 651, 5, 151, 0, 0, 651, 653, 3, 144, 72, 0, 652, 650, 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 655, 1, 0, 0, 0, 654, 648, 1, 0, 0, 0, 654, 649, 1, 0, 0, 0, 655, 93, 1, 0, 0, 0, 656, 661, 3, 96, 48, 0, 657, 658, 5, 116, 0, 0, 658, 660, 3, 96, 48, 0, 659, 657, 1, 0, 0, 0, 660, 663, 1, 0, 0, 0, 661, 659, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 95, 1, 0, 0, 0, 663, 661, 1, 0, 0, 0, 664, 665, 3, 156, 78, 0, 665, 666, 5, 122, 0, 0, 666, 667, 3, 146, 73, 0, 667, 97, 1, 0, 0, 0, 668, 670, 3, 100, 50, 0, 669, 668, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 673, 3, 102, 51, 0, 672, 671, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 675, 1, 0, 0, 0, 674, 676, 3, 104, 52, 0, 675, 674, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 99, 1, 0, 0, 0, 677, 678, 5, 67, 0, 0, 678, 679, 5, 11, 0, 0, 679, 680, 3, 114, 57, 0, 680, 101, 1, 0, 0, 0, 681, 682, 5, 64, 0, 0, 682, 683, 5, 11, 0, 0, 683, 684, 3, 88, 44, 0, 684, 103, 1, 0, 0, 0, 685, 686, 7, 8, 0, 0, 686, 687, 3, 106, 53, 0, 687, 105, 1, 0, 0, 0, 688, 695, 3, 108, 54, 0, 689, 690, 5, 9, 0, 0, 690, 691, 3, 108, 54, 0, 691, 692, 5, 2, 0, 0, 692, 693, 3, 108, 54, 0, 693, 695, 1, 0, 0, 0, 694, 688, 1, 0, 0, 0, 694, 689, 1, 0, 0, 0, 695, 107, 1, 0, 0, 0, 696, 697, 5, 19, 0, 0, 697, 
709, 5, 75, 0, 0, 698, 699, 5, 94, 0, 0, 699, 709, 5, 68, 0, 0, 700, 701, 5, 94, 0, 0, 701, 709, 5, 32, 0, 0, 702, 703, 3, 144, 72, 0, 703, 704, 5, 68, 0, 0, 704, 709, 1, 0, 0, 0, 705, 706, 3, 144, 72, 0, 706, 707, 5, 32, 0, 0, 707, 709, 1, 0, 0, 0, 708, 696, 1, 0, 0, 0, 708, 698, 1, 0, 0, 0, 708, 700, 1, 0, 0, 0, 708, 702, 1, 0, 0, 0, 708, 705, 1, 0, 0, 0, 709, 109, 1, 0, 0, 0, 710, 711, 3, 116, 58, 0, 711, 712, 5, 0, 0, 1, 712, 111, 1, 0, 0, 0, 713, 770, 3, 156, 78, 0, 714, 715, 3, 156, 78, 0, 715, 716, 5, 130, 0, 0, 716, 717, 3, 156, 78, 0, 717, 724, 3, 112, 56, 0, 718, 719, 5, 116, 0, 0, 719, 720, 3, 156, 78, 0, 720, 721, 3, 112, 56, 0, 721, 723, 1, 0, 0, 0, 722, 718, 1, 0, 0, 0, 723, 726, 1, 0, 0, 0, 724, 722, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 728, 1, 0, 0, 0, 726, 724, 1, 0, 0, 0, 727, 729, 5, 116, 0, 0, 728, 727, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 730, 1, 0, 0, 0, 730, 731, 5, 149, 0, 0, 731, 770, 1, 0, 0, 0, 732, 733, 3, 156, 78, 0, 733, 734, 5, 130, 0, 0, 734, 739, 3, 158, 79, 0, 735, 736, 5, 116, 0, 0, 736, 738, 3, 158, 79, 0, 737, 735, 1, 0, 0, 0, 738, 741, 1, 0, 0, 0, 739, 737, 1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740, 743, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 742, 744, 5, 116, 0, 0, 743, 742, 1, 0, 0, 0, 743, 744, 1, 0, 0, 0, 744, 745, 1, 0, 0, 0, 745, 746, 5, 149, 0, 0, 746, 770, 1, 0, 0, 0, 747, 748, 3, 156, 78, 0, 748, 749, 5, 130, 0, 0, 749, 754, 3, 112, 56, 0, 750, 751, 5, 116, 0, 0, 751, 753, 3, 112, 56, 0, 752, 750, 1, 0, 0, 0, 753, 756, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 754, 755, 1, 0, 0, 0, 755, 758, 1, 0, 0, 0, 756, 754, 1, 0, 0, 0, 757, 759, 5, 116, 0, 0, 758, 757, 1, 0, 0, 0, 758, 759, 1, 0, 0, 0, 759, 760, 1, 0, 0, 0, 760, 761, 5, 149, 0, 0, 761, 770, 1, 0, 0, 0, 762, 763, 3, 156, 78, 0, 763, 765, 5, 130, 0, 0, 764, 766, 3, 114, 57, 0, 765, 764, 1, 0, 0, 0, 765, 766, 1, 0, 0, 0, 766, 767, 1, 0, 0, 0, 767, 768, 5, 149, 0, 0, 768, 770, 1, 0, 0, 0, 769, 713, 1, 0, 0, 0, 769, 714, 1, 0, 0, 0, 769, 732, 1, 0, 0, 0, 769, 747, 1, 0, 0, 0, 769, 762, 1, 0, 0, 0, 770, 113, 1, 0, 0, 0, 771, 776, 3, 116, 58, 0, 772, 773, 5, 116, 0, 0, 773, 775, 3, 116, 58, 0, 774, 772, 1, 0, 0, 0, 775, 778, 1, 0, 0, 0, 776, 774, 1, 0, 0, 0, 776, 777, 1, 0, 0, 0, 777, 780, 1, 0, 0, 0, 778, 776, 1, 0, 0, 0, 779, 781, 5, 116, 0, 0, 780, 779, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 115, 1, 0, 0, 0, 782, 783, 6, 58, -1, 0, 783, 785, 5, 12, 0, 0, 784, 786, 3, 116, 58, 0, 785, 784, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 792, 1, 0, 0, 0, 787, 788, 5, 98, 0, 0, 788, 789, 3, 116, 58, 0, 789, 790, 5, 83, 0, 0, 790, 791, 3, 116, 58, 0, 791, 793, 1, 0, 0, 0, 792, 787, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 792, 1, 0, 0, 0, 794, 795, 1, 0, 0, 0, 795, 798, 1, 0, 0, 0, 796, 797, 5, 25, 0, 0, 797, 799, 3, 116, 58, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 801, 5, 26, 0, 0, 801, 933, 1, 0, 0, 0, 802, 803, 5, 13, 0, 0, 803, 804, 5, 130, 0, 0, 804, 805, 3, 116, 58, 0, 805, 806, 5, 6, 0, 0, 806, 807, 3, 112, 56, 0, 807, 808, 5, 149, 0, 0, 808, 933, 1, 0, 0, 0, 809, 810, 5, 20, 0, 0, 810, 933, 5, 110, 0, 0, 811, 812, 5, 45, 0, 0, 812, 813, 3, 116, 58, 0, 813, 814, 3, 148, 74, 0, 814, 933, 1, 0, 0, 0, 815, 816, 5, 82, 0, 0, 816, 817, 5, 130, 0, 0, 817, 818, 3, 116, 58, 0, 818, 819, 5, 34, 0, 0, 819, 822, 3, 116, 58, 0, 820, 821, 5, 33, 0, 0, 821, 823, 3, 116, 58, 0, 822, 820, 1, 0, 0, 0, 822, 823, 1, 0, 0, 0, 823, 824, 1, 0, 0, 0, 824, 825, 5, 149, 0, 0, 825, 933, 1, 0, 0, 0, 826, 827, 5, 86, 0, 0, 827, 933, 5, 110, 0, 0, 828, 829, 5, 91, 0, 0, 829, 830, 5, 130, 0, 0, 830, 831, 7, 
9, 0, 0, 831, 832, 3, 162, 81, 0, 832, 833, 5, 34, 0, 0, 833, 834, 3, 116, 58, 0, 834, 835, 5, 149, 0, 0, 835, 933, 1, 0, 0, 0, 836, 837, 3, 156, 78, 0, 837, 839, 5, 130, 0, 0, 838, 840, 3, 114, 57, 0, 839, 838, 1, 0, 0, 0, 839, 840, 1, 0, 0, 0, 840, 841, 1, 0, 0, 0, 841, 842, 5, 149, 0, 0, 842, 851, 1, 0, 0, 0, 843, 845, 5, 130, 0, 0, 844, 846, 5, 24, 0, 0, 845, 844, 1, 0, 0, 0, 845, 846, 1, 0, 0, 0, 846, 848, 1, 0, 0, 0, 847, 849, 3, 114, 57, 0, 848, 847, 1, 0, 0, 0, 848, 849, 1, 0, 0, 0, 849, 850, 1, 0, 0, 0, 850, 852, 5, 149, 0, 0, 851, 843, 1, 0, 0, 0, 851, 852, 1, 0, 0, 0, 852, 853, 1, 0, 0, 0, 853, 854, 5, 66, 0, 0, 854, 855, 5, 130, 0, 0, 855, 856, 3, 98, 49, 0, 856, 857, 5, 149, 0, 0, 857, 933, 1, 0, 0, 0, 858, 859, 3, 156, 78, 0, 859, 861, 5, 130, 0, 0, 860, 862, 3, 114, 57, 0, 861, 860, 1, 0, 0, 0, 861, 862, 1, 0, 0, 0, 862, 863, 1, 0, 0, 0, 863, 864, 5, 149, 0, 0, 864, 873, 1, 0, 0, 0, 865, 867, 5, 130, 0, 0, 866, 868, 5, 24, 0, 0, 867, 866, 1, 0, 0, 0, 867, 868, 1, 0, 0, 0, 868, 870, 1, 0, 0, 0, 869, 871, 3, 114, 57, 0, 870, 869, 1, 0, 0, 0, 870, 871, 1, 0, 0, 0, 871, 872, 1, 0, 0, 0, 872, 874, 5, 149, 0, 0, 873, 865, 1, 0, 0, 0, 873, 874, 1, 0, 0, 0, 874, 875, 1, 0, 0, 0, 875, 876, 5, 66, 0, 0, 876, 877, 3, 156, 78, 0, 877, 933, 1, 0, 0, 0, 878, 884, 3, 156, 78, 0, 879, 881, 5, 130, 0, 0, 880, 882, 3, 114, 57, 0, 881, 880, 1, 0, 0, 0, 881, 882, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 885, 5, 149, 0, 0, 884, 879, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 888, 5, 130, 0, 0, 887, 889, 5, 24, 0, 0, 888, 887, 1, 0, 0, 0, 888, 889, 1, 0, 0, 0, 889, 891, 1, 0, 0, 0, 890, 892, 3, 114, 57, 0, 891, 890, 1, 0, 0, 0, 891, 892, 1, 0, 0, 0, 892, 893, 1, 0, 0, 0, 893, 894, 5, 149, 0, 0, 894, 933, 1, 0, 0, 0, 895, 933, 3, 120, 60, 0, 896, 933, 3, 164, 82, 0, 897, 933, 3, 146, 73, 0, 898, 899, 5, 118, 0, 0, 899, 933, 3, 116, 58, 20, 900, 901, 5, 58, 0, 0, 901, 933, 3, 116, 58, 14, 902, 903, 3, 136, 68, 0, 903, 904, 5, 120, 0, 0, 904, 906, 1, 0, 0, 0, 905, 902, 1, 0, 0, 0, 905, 906, 1, 0, 0, 0, 906, 907, 1, 0, 0, 0, 907, 933, 5, 112, 0, 0, 908, 909, 5, 130, 0, 0, 909, 910, 3, 44, 22, 0, 910, 911, 5, 149, 0, 0, 911, 933, 1, 0, 0, 0, 912, 913, 5, 130, 0, 0, 913, 914, 3, 116, 58, 0, 914, 915, 5, 149, 0, 0, 915, 933, 1, 0, 0, 0, 916, 917, 5, 130, 0, 0, 917, 918, 3, 114, 57, 0, 918, 919, 5, 149, 0, 0, 919, 933, 1, 0, 0, 0, 920, 922, 5, 129, 0, 0, 921, 923, 3, 114, 57, 0, 922, 921, 1, 0, 0, 0, 922, 923, 1, 0, 0, 0, 923, 924, 1, 0, 0, 0, 924, 933, 5, 148, 0, 0, 925, 927, 5, 128, 0, 0, 926, 928, 3, 40, 20, 0, 927, 926, 1, 0, 0, 0, 927, 928, 1, 0, 0, 0, 928, 929, 1, 0, 0, 0, 929, 933, 5, 147, 0, 0, 930, 933, 3, 118, 59, 0, 931, 933, 3, 128, 64, 0, 932, 782, 1, 0, 0, 0, 932, 802, 1, 0, 0, 0, 932, 809, 1, 0, 0, 0, 932, 811, 1, 0, 0, 0, 932, 815, 1, 0, 0, 0, 932, 826, 1, 0, 0, 0, 932, 828, 1, 0, 0, 0, 932, 836, 1, 0, 0, 0, 932, 858, 1, 0, 0, 0, 932, 878, 1, 0, 0, 0, 932, 895, 1, 0, 0, 0, 932, 896, 1, 0, 0, 0, 932, 897, 1, 0, 0, 0, 932, 898, 1, 0, 0, 0, 932, 900, 1, 0, 0, 0, 932, 905, 1, 0, 0, 0, 932, 908, 1, 0, 0, 0, 932, 912, 1, 0, 0, 0, 932, 916, 1, 0, 0, 0, 932, 920, 1, 0, 0, 0, 932, 925, 1, 0, 0, 0, 932, 930, 1, 0, 0, 0, 932, 931, 1, 0, 0, 0, 933, 1044, 1, 0, 0, 0, 934, 938, 10, 19, 0, 0, 935, 939, 5, 112, 0, 0, 936, 939, 5, 151, 0, 0, 937, 939, 5, 138, 0, 0, 938, 935, 1, 0, 0, 0, 938, 936, 1, 0, 0, 0, 938, 937, 1, 0, 0, 0, 939, 940, 1, 0, 0, 0, 940, 1043, 3, 116, 58, 20, 941, 945, 10, 18, 0, 0, 942, 946, 5, 139, 0, 0, 943, 946, 5, 118, 0, 0, 944, 946, 5, 117, 0, 0, 945, 942, 1, 0, 
0, 0, 945, 943, 1, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 947, 1, 0, 0, 0, 947, 1043, 3, 116, 58, 19, 948, 973, 10, 17, 0, 0, 949, 974, 5, 121, 0, 0, 950, 974, 5, 122, 0, 0, 951, 974, 5, 133, 0, 0, 952, 974, 5, 131, 0, 0, 953, 974, 5, 132, 0, 0, 954, 974, 5, 123, 0, 0, 955, 974, 5, 124, 0, 0, 956, 958, 5, 58, 0, 0, 957, 956, 1, 0, 0, 0, 957, 958, 1, 0, 0, 0, 958, 959, 1, 0, 0, 0, 959, 961, 5, 42, 0, 0, 960, 962, 5, 15, 0, 0, 961, 960, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 974, 1, 0, 0, 0, 963, 965, 5, 58, 0, 0, 964, 963, 1, 0, 0, 0, 964, 965, 1, 0, 0, 0, 965, 966, 1, 0, 0, 0, 966, 974, 7, 10, 0, 0, 967, 974, 5, 145, 0, 0, 968, 974, 5, 146, 0, 0, 969, 974, 5, 135, 0, 0, 970, 974, 5, 126, 0, 0, 971, 974, 5, 127, 0, 0, 972, 974, 5, 134, 0, 0, 973, 949, 1, 0, 0, 0, 973, 950, 1, 0, 0, 0, 973, 951, 1, 0, 0, 0, 973, 952, 1, 0, 0, 0, 973, 953, 1, 0, 0, 0, 973, 954, 1, 0, 0, 0, 973, 955, 1, 0, 0, 0, 973, 957, 1, 0, 0, 0, 973, 964, 1, 0, 0, 0, 973, 967, 1, 0, 0, 0, 973, 968, 1, 0, 0, 0, 973, 969, 1, 0, 0, 0, 973, 970, 1, 0, 0, 0, 973, 971, 1, 0, 0, 0, 973, 972, 1, 0, 0, 0, 974, 975, 1, 0, 0, 0, 975, 1043, 3, 116, 58, 18, 976, 977, 10, 15, 0, 0, 977, 978, 5, 137, 0, 0, 978, 1043, 3, 116, 58, 16, 979, 980, 10, 13, 0, 0, 980, 981, 5, 2, 0, 0, 981, 1043, 3, 116, 58, 14, 982, 983, 10, 12, 0, 0, 983, 984, 5, 63, 0, 0, 984, 1043, 3, 116, 58, 13, 985, 987, 10, 11, 0, 0, 986, 988, 5, 58, 0, 0, 987, 986, 1, 0, 0, 0, 987, 988, 1, 0, 0, 0, 988, 989, 1, 0, 0, 0, 989, 990, 5, 9, 0, 0, 990, 991, 3, 116, 58, 0, 991, 992, 5, 2, 0, 0, 992, 993, 3, 116, 58, 12, 993, 1043, 1, 0, 0, 0, 994, 995, 10, 10, 0, 0, 995, 996, 5, 140, 0, 0, 996, 997, 3, 116, 58, 0, 997, 998, 5, 115, 0, 0, 998, 999, 3, 116, 58, 10, 999, 1043, 1, 0, 0, 0, 1000, 1001, 10, 30, 0, 0, 1001, 1003, 5, 130, 0, 0, 1002, 1004, 3, 114, 57, 0, 1003, 1002, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1043, 5, 149, 0, 0, 1006, 1007, 10, 26, 0, 0, 1007, 1008, 5, 129, 0, 0, 1008, 1009, 3, 116, 58, 0, 1009, 1010, 5, 148, 0, 0, 1010, 1043, 1, 0, 0, 0, 1011, 1012, 10, 25, 0, 0, 1012, 1013, 5, 120, 0, 0, 1013, 1043, 5, 108, 0, 0, 1014, 1015, 10, 24, 0, 0, 1015, 1016, 5, 120, 0, 0, 1016, 1043, 3, 156, 78, 0, 1017, 1018, 10, 23, 0, 0, 1018, 1019, 5, 136, 0, 0, 1019, 1020, 5, 129, 0, 0, 1020, 1021, 3, 116, 58, 0, 1021, 1022, 5, 148, 0, 0, 1022, 1043, 1, 0, 0, 0, 1023, 1024, 10, 22, 0, 0, 1024, 1025, 5, 136, 0, 0, 1025, 1043, 5, 108, 0, 0, 1026, 1027, 10, 21, 0, 0, 1027, 1028, 5, 136, 0, 0, 1028, 1043, 3, 156, 78, 0, 1029, 1030, 10, 16, 0, 0, 1030, 1032, 5, 46, 0, 0, 1031, 1033, 5, 58, 0, 0, 1032, 1031, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1043, 5, 59, 0, 0, 1035, 1040, 10, 9, 0, 0, 1036, 1037, 5, 6, 0, 0, 1037, 1041, 3, 156, 78, 0, 1038, 1039, 5, 6, 0, 0, 1039, 1041, 5, 110, 0, 0, 1040, 1036, 1, 0, 0, 0, 1040, 1038, 1, 0, 0, 0, 1041, 1043, 1, 0, 0, 0, 1042, 934, 1, 0, 0, 0, 1042, 941, 1, 0, 0, 0, 1042, 948, 1, 0, 0, 0, 1042, 976, 1, 0, 0, 0, 1042, 979, 1, 0, 0, 0, 1042, 982, 1, 0, 0, 0, 1042, 985, 1, 0, 0, 0, 1042, 994, 1, 0, 0, 0, 1042, 1000, 1, 0, 0, 0, 1042, 1006, 1, 0, 0, 0, 1042, 1011, 1, 0, 0, 0, 1042, 1014, 1, 0, 0, 0, 1042, 1017, 1, 0, 0, 0, 1042, 1023, 1, 0, 0, 0, 1042, 1026, 1, 0, 0, 0, 1042, 1029, 1, 0, 0, 0, 1042, 1035, 1, 0, 0, 0, 1043, 1046, 1, 0, 0, 0, 1044, 1042, 1, 0, 0, 0, 1044, 1045, 1, 0, 0, 0, 1045, 117, 1, 0, 0, 0, 1046, 1044, 1, 0, 0, 0, 1047, 1048, 5, 130, 0, 0, 1048, 1053, 3, 156, 78, 0, 1049, 1050, 5, 116, 0, 0, 1050, 1052, 3, 156, 78, 0, 1051, 1049, 1, 0, 0, 0, 1052, 1055, 1, 0, 0, 0, 1053, 
1051, 1, 0, 0, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1057, 1, 0, 0, 0, 1055, 1053, 1, 0, 0, 0, 1056, 1058, 5, 116, 0, 0, 1057, 1056, 1, 0, 0, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1060, 5, 149, 0, 0, 1060, 1075, 1, 0, 0, 0, 1061, 1066, 3, 156, 78, 0, 1062, 1063, 5, 116, 0, 0, 1063, 1065, 3, 156, 78, 0, 1064, 1062, 1, 0, 0, 0, 1065, 1068, 1, 0, 0, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1070, 1, 0, 0, 0, 1068, 1066, 1, 0, 0, 0, 1069, 1071, 5, 116, 0, 0, 1070, 1069, 1, 0, 0, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1075, 1, 0, 0, 0, 1072, 1073, 5, 130, 0, 0, 1073, 1075, 5, 149, 0, 0, 1074, 1047, 1, 0, 0, 0, 1074, 1061, 1, 0, 0, 0, 1074, 1072, 1, 0, 0, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1079, 5, 111, 0, 0, 1077, 1080, 3, 116, 58, 0, 1078, 1080, 3, 36, 18, 0, 1079, 1077, 1, 0, 0, 0, 1079, 1078, 1, 0, 0, 0, 1080, 119, 1, 0, 0, 0, 1081, 1082, 5, 132, 0, 0, 1082, 1086, 3, 156, 78, 0, 1083, 1085, 3, 122, 61, 0, 1084, 1083, 1, 0, 0, 0, 1085, 1088, 1, 0, 0, 0, 1086, 1084, 1, 0, 0, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1089, 1, 0, 0, 0, 1088, 1086, 1, 0, 0, 0, 1089, 1090, 5, 151, 0, 0, 1090, 1091, 5, 124, 0, 0, 1091, 1114, 1, 0, 0, 0, 1092, 1093, 5, 132, 0, 0, 1093, 1097, 3, 156, 78, 0, 1094, 1096, 3, 122, 61, 0, 1095, 1094, 1, 0, 0, 0, 1096, 1099, 1, 0, 0, 0, 1097, 1095, 1, 0, 0, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1100, 1, 0, 0, 0, 1099, 1097, 1, 0, 0, 0, 1100, 1106, 5, 124, 0, 0, 1101, 1107, 3, 120, 60, 0, 1102, 1103, 5, 128, 0, 0, 1103, 1104, 3, 116, 58, 0, 1104, 1105, 5, 147, 0, 0, 1105, 1107, 1, 0, 0, 0, 1106, 1101, 1, 0, 0, 0, 1106, 1102, 1, 0, 0, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 5, 132, 0, 0, 1109, 1110, 5, 151, 0, 0, 1110, 1111, 3, 156, 78, 0, 1111, 1112, 5, 124, 0, 0, 1112, 1114, 1, 0, 0, 0, 1113, 1081, 1, 0, 0, 0, 1113, 1092, 1, 0, 0, 0, 1114, 121, 1, 0, 0, 0, 1115, 1116, 3, 156, 78, 0, 1116, 1117, 5, 122, 0, 0, 1117, 1118, 3, 162, 81, 0, 1118, 1127, 1, 0, 0, 0, 1119, 1120, 3, 156, 78, 0, 1120, 1121, 5, 122, 0, 0, 1121, 1122, 5, 128, 0, 0, 1122, 1123, 3, 116, 58, 0, 1123, 1124, 5, 147, 0, 0, 1124, 1127, 1, 0, 0, 0, 1125, 1127, 3, 156, 78, 0, 1126, 1115, 1, 0, 0, 0, 1126, 1119, 1, 0, 0, 0, 1126, 1125, 1, 0, 0, 0, 1127, 123, 1, 0, 0, 0, 1128, 1133, 3, 126, 63, 0, 1129, 1130, 5, 116, 0, 0, 1130, 1132, 3, 126, 63, 0, 1131, 1129, 1, 0, 0, 0, 1132, 1135, 1, 0, 0, 0, 1133, 1131, 1, 0, 0, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1137, 1, 0, 0, 0, 1135, 1133, 1, 0, 0, 0, 1136, 1138, 5, 116, 0, 0, 1137, 1136, 1, 0, 0, 0, 1137, 1138, 1, 0, 0, 0, 1138, 125, 1, 0, 0, 0, 1139, 1140, 3, 156, 78, 0, 1140, 1141, 5, 6, 0, 0, 1141, 1142, 5, 130, 0, 0, 1142, 1143, 3, 44, 22, 0, 1143, 1144, 5, 149, 0, 0, 1144, 1150, 1, 0, 0, 0, 1145, 1146, 3, 116, 58, 0, 1146, 1147, 5, 6, 0, 0, 1147, 1148, 3, 156, 78, 0, 1148, 1150, 1, 0, 0, 0, 1149, 1139, 1, 0, 0, 0, 1149, 1145, 1, 0, 0, 0, 1150, 127, 1, 0, 0, 0, 1151, 1159, 3, 160, 80, 0, 1152, 1153, 3, 136, 68, 0, 1153, 1154, 5, 120, 0, 0, 1154, 1156, 1, 0, 0, 0, 1155, 1152, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 1, 0, 0, 0, 1157, 1159, 3, 130, 65, 0, 1158, 1151, 1, 0, 0, 0, 1158, 1155, 1, 0, 0, 0, 1159, 129, 1, 0, 0, 0, 1160, 1165, 3, 156, 78, 0, 1161, 1162, 5, 120, 0, 0, 1162, 1164, 3, 156, 78, 0, 1163, 1161, 1, 0, 0, 0, 1164, 1167, 1, 0, 0, 0, 1165, 1163, 1, 0, 0, 0, 1165, 1166, 1, 0, 0, 0, 1166, 131, 1, 0, 0, 0, 1167, 1165, 1, 0, 0, 0, 1168, 1169, 6, 66, -1, 0, 1169, 1178, 3, 136, 68, 0, 1170, 1178, 3, 134, 67, 0, 1171, 1172, 5, 130, 0, 0, 1172, 1173, 3, 44, 22, 0, 1173, 1174, 5, 149, 0, 0, 1174, 1178, 1, 0, 0, 0, 1175, 1178, 
3, 120, 60, 0, 1176, 1178, 3, 160, 80, 0, 1177, 1168, 1, 0, 0, 0, 1177, 1170, 1, 0, 0, 0, 1177, 1171, 1, 0, 0, 0, 1177, 1175, 1, 0, 0, 0, 1177, 1176, 1, 0, 0, 0, 1178, 1187, 1, 0, 0, 0, 1179, 1183, 10, 3, 0, 0, 1180, 1184, 3, 154, 77, 0, 1181, 1182, 5, 6, 0, 0, 1182, 1184, 3, 156, 78, 0, 1183, 1180, 1, 0, 0, 0, 1183, 1181, 1, 0, 0, 0, 1184, 1186, 1, 0, 0, 0, 1185, 1179, 1, 0, 0, 0, 1186, 1189, 1, 0, 0, 0, 1187, 1185, 1, 0, 0, 0, 1187, 1188, 1, 0, 0, 0, 1188, 133, 1, 0, 0, 0, 1189, 1187, 1, 0, 0, 0, 1190, 1191, 3, 156, 78, 0, 1191, 1193, 5, 130, 0, 0, 1192, 1194, 3, 138, 69, 0, 1193, 1192, 1, 0, 0, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 5, 149, 0, 0, 1196, 135, 1, 0, 0, 0, 1197, 1198, 3, 140, 70, 0, 1198, 1199, 5, 120, 0, 0, 1199, 1201, 1, 0, 0, 0, 1200, 1197, 1, 0, 0, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 3, 156, 78, 0, 1203, 137, 1, 0, 0, 0, 1204, 1209, 3, 116, 58, 0, 1205, 1206, 5, 116, 0, 0, 1206, 1208, 3, 116, 58, 0, 1207, 1205, 1, 0, 0, 0, 1208, 1211, 1, 0, 0, 0, 1209, 1207, 1, 0, 0, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1213, 1, 0, 0, 0, 1211, 1209, 1, 0, 0, 0, 1212, 1214, 5, 116, 0, 0, 1213, 1212, 1, 0, 0, 0, 1213, 1214, 1, 0, 0, 0, 1214, 139, 1, 0, 0, 0, 1215, 1216, 3, 156, 78, 0, 1216, 141, 1, 0, 0, 0, 1217, 1226, 5, 106, 0, 0, 1218, 1219, 5, 120, 0, 0, 1219, 1226, 7, 11, 0, 0, 1220, 1221, 5, 108, 0, 0, 1221, 1223, 5, 120, 0, 0, 1222, 1224, 7, 11, 0, 0, 1223, 1222, 1, 0, 0, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1226, 1, 0, 0, 0, 1225, 1217, 1, 0, 0, 0, 1225, 1218, 1, 0, 0, 0, 1225, 1220, 1, 0, 0, 0, 1226, 143, 1, 0, 0, 0, 1227, 1229, 7, 12, 0, 0, 1228, 1227, 1, 0, 0, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1236, 1, 0, 0, 0, 1230, 1237, 3, 142, 71, 0, 1231, 1237, 5, 107, 0, 0, 1232, 1237, 5, 108, 0, 0, 1233, 1237, 5, 109, 0, 0, 1234, 1237, 5, 43, 0, 0, 1235, 1237, 5, 57, 0, 0, 1236, 1230, 1, 0, 0, 0, 1236, 1231, 1, 0, 0, 0, 1236, 1232, 1, 0, 0, 0, 1236, 1233, 1, 0, 0, 0, 1236, 1234, 1, 0, 0, 0, 1236, 1235, 1, 0, 0, 0, 1237, 145, 1, 0, 0, 0, 1238, 1242, 3, 144, 72, 0, 1239, 1242, 5, 110, 0, 0, 1240, 1242, 5, 59, 0, 0, 1241, 1238, 1, 0, 0, 0, 1241, 1239, 1, 0, 0, 0, 1241, 1240, 1, 0, 0, 0, 1242, 147, 1, 0, 0, 0, 1243, 1244, 7, 13, 0, 0, 1244, 149, 1, 0, 0, 0, 1245, 1246, 7, 14, 0, 0, 1246, 151, 1, 0, 0, 0, 1247, 1248, 7, 15, 0, 0, 1248, 153, 1, 0, 0, 0, 1249, 1252, 5, 105, 0, 0, 1250, 1252, 3, 152, 76, 0, 1251, 1249, 1, 0, 0, 0, 1251, 1250, 1, 0, 0, 0, 1252, 155, 1, 0, 0, 0, 1253, 1257, 5, 105, 0, 0, 1254, 1257, 3, 148, 74, 0, 1255, 1257, 3, 150, 75, 0, 1256, 1253, 1, 0, 0, 0, 1256, 1254, 1, 0, 0, 0, 1256, 1255, 1, 0, 0, 0, 1257, 157, 1, 0, 0, 0, 1258, 1259, 3, 162, 81, 0, 1259, 1260, 5, 122, 0, 0, 1260, 1261, 3, 144, 72, 0, 1261, 159, 1, 0, 0, 0, 1262, 1263, 5, 128, 0, 0, 1263, 1264, 3, 130, 65, 0, 1264, 1265, 5, 147, 0, 0, 1265, 161, 1, 0, 0, 0, 1266, 1269, 5, 110, 0, 0, 1267, 1269, 3, 164, 82, 0, 1268, 1266, 1, 0, 0, 0, 1268, 1267, 1, 0, 0, 0, 1269, 163, 1, 0, 0, 0, 1270, 1274, 5, 142, 0, 0, 1271, 1273, 3, 166, 83, 0, 1272, 1271, 1, 0, 0, 0, 1273, 1276, 1, 0, 0, 0, 1274, 1272, 1, 0, 0, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1277, 1, 0, 0, 0, 1276, 1274, 1, 0, 0, 0, 1277, 1278, 5, 144, 0, 0, 1278, 165, 1, 0, 0, 0, 1279, 1280, 5, 157, 0, 0, 1280, 1281, 3, 116, 58, 0, 1281, 1282, 5, 147, 0, 0, 1282, 1285, 1, 0, 0, 0, 1283, 1285, 5, 156, 0, 0, 1284, 1279, 1, 0, 0, 0, 1284, 1283, 1, 0, 0, 0, 1285, 167, 1, 0, 0, 0, 1286, 1290, 5, 143, 0, 0, 1287, 1289, 3, 170, 85, 0, 1288, 1287, 1, 0, 0, 0, 1289, 1292, 1, 0, 0, 0, 1290, 1288, 1, 0, 0, 0, 1290, 1291, 1, 0, 0, 0, 
1291, 1293, 1, 0, 0, 0, 1292, 1290, 1, 0, 0, 0, 1293, 1294, 5, 0, 0, 1, 1294, 169, 1, 0, 0, 0, 1295, 1296, 5, 159, 0, 0, 1296, 1297, 3, 116, 58, 0, 1297, 1298, 5, 147, 0, 0, 1298, 1301, 1, 0, 0, 0, 1299, 1301, 5, 158, 0, 0, 1300, 1295, 1, 0, 0, 0, 1300, 1299, 1, 0, 0, 0, 1301, 171, 1, 0, 0, 0, 167, 175, 182, 191, 198, 202, 216, 220, 223, 227, 230, 237, 241, 250, 255, 264, 272, 279, 283, 289, 294, 302, 309, 315, 327, 335, 349, 353, 358, 368, 377, 380, 384, 387, 391, 394, 397, 400, 403, 407, 411, 414, 417, 420, 424, 427, 436, 442, 463, 480, 497, 503, 509, 520, 522, 533, 536, 542, 550, 556, 558, 562, 567, 570, 573, 577, 581, 584, 586, 589, 593, 597, 600, 602, 604, 609, 620, 626, 633, 638, 642, 646, 652, 654, 661, 669, 672, 675, 694, 708, 724, 728, 739, 743, 754, 758, 765, 769, 776, 780, 785, 794, 798, 822, 839, 845, 848, 851, 861, 867, 870, 873, 881, 884, 888, 891, 905, 922, 927, 932, 938, 945, 957, 961, 964, 973, 987, 1003, 1032, 1040, 1042, 1044, 1053, 1057, 1066, 1070, 1074, 1079, 1086, 1097, 1106, 1113, 1126, 1133, 1137, 1149, 1155, 1158, 1165, 1177, 1183, 1187, 1193, 1200, 1209, 1213, 1223, 1225, 1228, 1236, 1241, 1251, 1256, 1268, 1274, 1284, 1290, 1300] \ No newline at end of file diff --git a/hogql_parser/HogQLParserBaseVisitor.cpp b/hogql_parser/HogQLParserBaseVisitor.cpp index 03bf38add14d5..b5a8af7e0f7aa 100644 --- a/hogql_parser/HogQLParserBaseVisitor.cpp +++ b/hogql_parser/HogQLParserBaseVisitor.cpp @@ -1,5 +1,5 @@ -// Generated from HogQLParser.g4 by ANTLR 4.13.1 +// Generated from HogQLParser.g4 by ANTLR 4.13.2 #include "HogQLParserBaseVisitor.h" diff --git a/hogql_parser/HogQLParserBaseVisitor.h b/hogql_parser/HogQLParserBaseVisitor.h index 7329e835cad57..80b4d5d487765 100644 --- a/hogql_parser/HogQLParserBaseVisitor.h +++ b/hogql_parser/HogQLParserBaseVisitor.h @@ -1,5 +1,5 @@ -// Generated from HogQLParser.g4 by ANTLR 4.13.1 +// Generated from HogQLParser.g4 by ANTLR 4.13.2 #pragma once @@ -375,6 +375,10 @@ class HogQLParserBaseVisitor : public HogQLParserVisitor { return visitChildren(ctx); } + virtual std::any visitColumnExprCall(HogQLParser::ColumnExprCallContext *ctx) override { + return visitChildren(ctx); + } + virtual std::any visitColumnExprArrayAccess(HogQLParser::ColumnExprArrayAccessContext *ctx) override { return visitChildren(ctx); } @@ -427,23 +431,19 @@ class HogQLParserBaseVisitor : public HogQLParserVisitor { return visitChildren(ctx); } - virtual std::any visitColumnExprIdentifier(HogQLParser::ColumnExprIdentifierContext *ctx) override { - return visitChildren(ctx); - } - - virtual std::any visitColumnExprFunction(HogQLParser::ColumnExprFunctionContext *ctx) override { + virtual std::any visitColumnExprLambda(HogQLParser::ColumnExprLambdaContext *ctx) override { return visitChildren(ctx); } - virtual std::any visitColumnExprAsterisk(HogQLParser::ColumnExprAsteriskContext *ctx) override { + virtual std::any visitColumnExprIdentifier(HogQLParser::ColumnExprIdentifierContext *ctx) override { return visitChildren(ctx); } - virtual std::any visitColumnArgList(HogQLParser::ColumnArgListContext *ctx) override { + virtual std::any visitColumnExprFunction(HogQLParser::ColumnExprFunctionContext *ctx) override { return visitChildren(ctx); } - virtual std::any visitColumnArgExpr(HogQLParser::ColumnArgExprContext *ctx) override { + virtual std::any visitColumnExprAsterisk(HogQLParser::ColumnExprAsteriskContext *ctx) override { return visitChildren(ctx); } diff --git a/hogql_parser/HogQLParserVisitor.cpp b/hogql_parser/HogQLParserVisitor.cpp index 
82a2f14e4ec57..07a511ac187fc 100644 --- a/hogql_parser/HogQLParserVisitor.cpp +++ b/hogql_parser/HogQLParserVisitor.cpp @@ -1,5 +1,5 @@ -// Generated from HogQLParser.g4 by ANTLR 4.13.1 +// Generated from HogQLParser.g4 by ANTLR 4.13.2 #include "HogQLParserVisitor.h" diff --git a/hogql_parser/HogQLParserVisitor.h b/hogql_parser/HogQLParserVisitor.h index 8e4259a8209b1..0b9e1797ad836 100644 --- a/hogql_parser/HogQLParserVisitor.h +++ b/hogql_parser/HogQLParserVisitor.h @@ -1,5 +1,5 @@ -// Generated from HogQLParser.g4 by ANTLR 4.13.1 +// Generated from HogQLParser.g4 by ANTLR 4.13.2 #pragma once @@ -199,6 +199,8 @@ class HogQLParserVisitor : public antlr4::tree::AbstractParseTreeVisitor { virtual std::any visitColumnExprTuple(HogQLParser::ColumnExprTupleContext *context) = 0; + virtual std::any visitColumnExprCall(HogQLParser::ColumnExprCallContext *context) = 0; + virtual std::any visitColumnExprArrayAccess(HogQLParser::ColumnExprArrayAccessContext *context) = 0; virtual std::any visitColumnExprBetween(HogQLParser::ColumnExprBetweenContext *context) = 0; @@ -225,16 +227,14 @@ class HogQLParserVisitor : public antlr4::tree::AbstractParseTreeVisitor { virtual std::any visitColumnExprWinFunction(HogQLParser::ColumnExprWinFunctionContext *context) = 0; + virtual std::any visitColumnExprLambda(HogQLParser::ColumnExprLambdaContext *context) = 0; + virtual std::any visitColumnExprIdentifier(HogQLParser::ColumnExprIdentifierContext *context) = 0; virtual std::any visitColumnExprFunction(HogQLParser::ColumnExprFunctionContext *context) = 0; virtual std::any visitColumnExprAsterisk(HogQLParser::ColumnExprAsteriskContext *context) = 0; - virtual std::any visitColumnArgList(HogQLParser::ColumnArgListContext *context) = 0; - - virtual std::any visitColumnArgExpr(HogQLParser::ColumnArgExprContext *context) = 0; - virtual std::any visitColumnLambdaExpr(HogQLParser::ColumnLambdaExprContext *context) = 0; virtual std::any visitHogqlxTagElementClosed(HogQLParser::HogqlxTagElementClosedContext *context) = 0; diff --git a/hogql_parser/parser.cpp b/hogql_parser/parser.cpp index 54a1eeedd8e29..409f2fdb46f7b 100644 --- a/hogql_parser/parser.cpp +++ b/hogql_parser/parser.cpp @@ -2151,13 +2151,13 @@ class HogQLParseTreeConverter : public HogQLParserBaseVisitor { VISIT(ColumnExprNot) { RETURN_NEW_AST_NODE("Not", "{s:N}", "expr", visitAsPyObject(ctx->columnExpr())); } VISIT(ColumnExprWinFunctionTarget) { - auto column_expr_list_ctx = ctx->columnExprList(); + auto column_expr_list_ctx = ctx->columnExprs; string name = visitAsString(ctx->identifier(0)); string over_identifier = visitAsString(ctx->identifier(1)); PyObject* exprs = visitAsPyObjectOrEmptyList(column_expr_list_ctx); PyObject* args; try { - args = visitAsPyObjectOrEmptyList(ctx->columnArgList()); + args = visitAsPyObjectOrEmptyList(ctx->columnArgList); } catch (...) { Py_DECREF(exprs); throw; @@ -2170,11 +2170,11 @@ class HogQLParseTreeConverter : public HogQLParserBaseVisitor { VISIT(ColumnExprWinFunction) { string identifier = visitAsString(ctx->identifier()); - auto column_expr_list_ctx = ctx->columnExprList(); + auto column_expr_list_ctx = ctx->columnExprs; PyObject* exprs = visitAsPyObjectOrEmptyList(column_expr_list_ctx); PyObject* args; try { - args = visitAsPyObjectOrEmptyList(ctx->columnArgList()); + args = visitAsPyObjectOrEmptyList(ctx->columnArgList); } catch (...) 
{ Py_DECREF(exprs); throw; @@ -2197,10 +2197,18 @@ class HogQLParseTreeConverter : public HogQLParserBaseVisitor { VISIT(ColumnExprFunction) { string name = visitAsString(ctx->identifier()); - PyObject* params = visitAsPyObjectOrNone(ctx->columnExprList()); + + // if two LPARENs ()(), make sure the first one is at least an empty list + PyObject* params; + if (ctx->LPAREN(1)) { + params = visitAsPyObjectOrEmptyList(ctx->columnExprs); + } else { + params = visitAsPyObjectOrNone(ctx->columnExprs); + } + PyObject* args; try { - args = visitAsPyObjectOrEmptyList(ctx->columnArgList()); + args = visitAsPyObjectOrEmptyList(ctx->columnArgList); } catch (...) { Py_DECREF(params); throw; @@ -2223,10 +2231,18 @@ class HogQLParseTreeConverter : public HogQLParserBaseVisitor { VISIT(ColumnExprTagElement) { return visit(ctx->hogqlxTagElement()); } - VISIT(ColumnArgList) { return visitPyListOfObjects(ctx->columnArgExpr()); } - VISIT(ColumnLambdaExpr) { - PyObject* expr = visitAsPyObject(ctx->columnExpr()); + PyObject* expr; + auto column_expr_ctx = ctx->columnExpr(); + auto block_ctx = ctx->block(); + if (!column_expr_ctx && !block_ctx) { + throw ParsingError("ColumnLambdaExpr must have either a columnExpr or a block"); + } + if (column_expr_ctx) { + expr = visitAsPyObject(column_expr_ctx); + } else { + expr = visitAsPyObject(block_ctx); + } PyObject* args; try { args = X_PyList_FromStrings(visitAsVectorOfStrings(ctx->identifier())); @@ -2575,6 +2591,18 @@ class HogQLParseTreeConverter : public HogQLParserBaseVisitor { RETURN_NEW_AST_NODE("Call", "{s:s, s:[NN]}", "name", "ifNull", "args", value, fallback); } + VISIT(ColumnExprCall) { + PyObject* expr = visitAsPyObject(ctx->columnExpr()); + PyObject* args; + try { + args = visitAsPyObjectOrEmptyList(ctx->columnExprList()); + } catch (...) { + Py_DECREF(expr); + throw; + } + RETURN_NEW_AST_NODE("ExprCall", "{s:N, s:N}", "expr", expr, "args", args); + } + VISIT(ColumnExprTemplateString) { return visit(ctx->templateString()); } VISIT(String) { diff --git a/hogql_parser/setup.py b/hogql_parser/setup.py index 1eb433f6d1d9a..ba14c773be681 100644 --- a/hogql_parser/setup.py +++ b/hogql_parser/setup.py @@ -32,7 +32,7 @@ setup( name="hogql_parser", - version="1.0.36", + version="1.0.38", url="https://github.com/PostHog/posthog/tree/master/hogql_parser", author="PostHog Inc.", author_email="hey@posthog.com", diff --git a/hogvm/README.md b/hogvm/README.md index 9c44d22a518d9..a2eeab5f0f6d6 100644 --- a/hogvm/README.md +++ b/hogvm/README.md @@ -12,7 +12,7 @@ HogQL Bytecode is a compact representation of a subset of the HogQL AST nodes. 
I 1 or 2 # [_H, op.INTEGER, 2, op.INTEGER, 1, op.OR, 2] not true # [_H, op.TRUE, op.NOT] properties.bla # [_H, op.STRING, "bla", op.STRING, "properties", op.GET_GLOBAL, 2] -call('arg', 'another') # [_H, op.STRING, "another", op.STRING, "arg", op.CALL, "call", 2] +call('arg', 'another') # [_H, op.STRING, "another", op.STRING, "arg", op.CALL_GLOBAL, "call", 2] 1 = 2 # [_H, op.INTEGER, 2, op.INTEGER, 1, op.EQ] 'bla' !~ 'a' # [_H, op.STRING, 'a', op.STRING, 'bla', op.NOT_REGEX] ``` diff --git a/hogvm/__tests__/__snapshots__/arrays.hoge b/hogvm/__tests__/__snapshots__/arrays.hoge index 6a8603832d3c1..4afcc23ad537a 100644 --- a/hogvm/__tests__/__snapshots__/arrays.hoge +++ b/hogvm/__tests__/__snapshots__/arrays.hoge @@ -1,4 +1,4 @@ -["_h", 43, 0, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 2, "print", 1, 35, 33, 1, 32, "2", 33, 3, 43, 3, 2, +["_H", 1, 43, 0, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 2, "print", 1, 35, 33, 1, 32, "2", 33, 3, 43, 3, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 2, 33, 4, 43, 3, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 33, 4, 43, 2, 43, 2, 33, 5, 43, 3, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 36, 0, 33, 2, 45, 2, "print", 1, 35, 36, 0, 33, 2, 48, 2, "print", 1, 35, 36, 0, 33, 2, 48, 2, "print", 1, 35, 36, 0, 33, 7, 48, 2, "print", 1, 35, 36, 0, 33, 7, 48, 2, "print", @@ -11,17 +11,17 @@ 35, 33, 1, 33, 2, 33, 3, 33, 4, 43, 2, 43, 2, 33, 5, 43, 3, 33, 2, 45, 33, 2, 45, 33, 2, 45, 2, "print", 1, 35, 32, "------", 2, "print", 1, 35, 33, 1, 33, 2, 33, 1, 33, 2, 33, 3, 43, 3, 43, 3, 36, 1, 33, 2, 33, 4, 46, 36, 1, 33, 1, 45, 2, "print", 1, 35, 36, 1, 33, 2, 45, 2, "print", 1, 35, 36, 1, 33, 3, 45, 2, "print", 1, 35, 36, 1, 33, 3, 45, 33, 3, -33, 8, 46, 36, 1, 2, "print", 1, 35, 32, "------", 2, "print", 1, 35, 33, 4, 33, 1, 33, 2, 33, 3, 43, 3, 2, -"arrayPushBack", 2, 2, "print", 1, 35, 33, 0, 33, 1, 33, 2, 33, 3, 43, 3, 2, "arrayPushFront", 2, 2, "print", 1, 35, 33, +33, 8, 46, 36, 1, 2, "print", 1, 35, 32, "------", 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 33, 4, 2, +"arrayPushBack", 2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 33, 0, 2, "arrayPushFront", 2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 2, "arrayPopBack", 1, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 2, "arrayPopFront", 1, 2, "print", 1, 35, 33, 3, 33, 2, 33, 1, 43, 3, 2, "arraySort", 1, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 2, -"arrayReverse", 1, 2, "print", 1, 35, 33, 3, 33, 2, 33, 1, 43, 3, 2, "arrayReverseSort", 1, 2, "print", 1, 35, 32, ",", -33, 1, 33, 2, 33, 3, 43, 3, 2, "arrayStringConcat", 2, 2, "print", 1, 35, 32, "-----", 2, "print", 1, 35, 33, 1, 33, 2, -33, 3, 33, 4, 43, 4, 36, 2, 2, "print", 1, 35, 33, 5, 36, 2, 2, "arrayPushBack", 2, 35, 36, 2, 2, "print", 1, 35, 33, 0, -36, 2, 2, "arrayPushFront", 2, 35, 36, 2, 2, "print", 1, 35, 36, 2, 2, "arrayPopBack", 1, 35, 36, 2, 2, "print", 1, 35, -36, 2, 2, "arrayPopFront", 1, 35, 36, 2, 2, "print", 1, 35, 36, 2, 2, "arraySort", 1, 35, 36, 2, 2, "print", 1, 35, 36, -2, 2, "arrayReverse", 1, 35, 36, 2, 2, "print", 1, 35, 36, 2, 2, "arrayReverseSort", 1, 35, 36, 2, 2, "print", 1, 35, -32, "------", 2, "print", 1, 35, 33, 0, 36, 2, 2, "has", 2, 2, "print", 1, 35, 33, 2, 36, 2, 2, "has", 2, 2, "print", 1, -35, 32, "banana", 36, 2, 2, "has", 2, 2, "print", 1, 35, 32, "banana", 32, "banananas", 2, "has", 2, 2, "print", 1, 35, -32, "foo", 32, "banananas", 2, "has", 2, 2, "print", 1, 35, 32, "1", 32, "1", 32, "2", 43, 2, 2, "has", 2, 2, "print", -1, 35, 35, 35, 35] +"arrayReverse", 1, 2, "print", 1, 35, 33, 3, 33, 2, 
33, 1, 43, 3, 2, "arrayReverseSort", 1, 2, "print", 1, 35, 33, 1, +33, 2, 33, 3, 43, 3, 32, ",", 2, "arrayStringConcat", 2, 2, "print", 1, 35, 32, "-----", 2, "print", 1, 35, 33, 1, 33, +2, 33, 3, 33, 4, 43, 4, 36, 2, 2, "print", 1, 35, 36, 2, 33, 5, 2, "arrayPushBack", 2, 35, 36, 2, 2, "print", 1, 35, 36, +2, 33, 0, 2, "arrayPushFront", 2, 35, 36, 2, 2, "print", 1, 35, 36, 2, 2, "arrayPopBack", 1, 35, 36, 2, 2, "print", 1, +35, 36, 2, 2, "arrayPopFront", 1, 35, 36, 2, 2, "print", 1, 35, 36, 2, 2, "arraySort", 1, 35, 36, 2, 2, "print", 1, 35, +36, 2, 2, "arrayReverse", 1, 35, 36, 2, 2, "print", 1, 35, 36, 2, 2, "arrayReverseSort", 1, 35, 36, 2, 2, "print", 1, +35, 32, "------", 2, "print", 1, 35, 36, 2, 33, 0, 2, "has", 2, 2, "print", 1, 35, 36, 2, 33, 2, 2, "has", 2, 2, +"print", 1, 35, 36, 2, 32, "banana", 2, "has", 2, 2, "print", 1, 35, 32, "banananas", 32, "banana", 2, "has", 2, 2, +"print", 1, 35, 32, "banananas", 32, "foo", 2, "has", 2, 2, "print", 1, 35, 32, "1", 32, "2", 43, 2, 32, "1", 2, "has", +2, 2, "print", 1, 35, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/catch.hoge b/hogvm/__tests__/__snapshots__/catch.hoge index 3fbbdda162693..e3588d7a1cdf9 100644 --- a/hogvm/__tests__/__snapshots__/catch.hoge +++ b/hogvm/__tests__/__snapshots__/catch.hoge @@ -1,11 +1,11 @@ -["_h", 41, "FishError", 1, 9, 31, 36, 0, 32, "FishError", 2, "HogError", 3, 38, 41, "FoodError", 1, 9, 31, 36, 0, 32, -"FoodError", 2, "HogError", 3, 38, 50, 10, 32, "You forgot to feed your fish", 2, "FishError", 1, 49, 51, 39, 55, 36, 0, -32, "type", 45, 32, "FoodError", 36, 1, 11, 40, 16, 36, 0, 32, "message", 45, 32, "Problem with your food: ", 2, -"concat", 2, 2, "print", 1, 35, 39, 25, 32, "FishError", 36, 1, 11, 40, 16, 36, 0, 32, "message", 45, 32, -"Problem with your fish: ", 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 50, 10, 32, -"Your fish are hungry", 2, "FoodError", 1, 49, 51, 39, 55, 36, 0, 32, "type", 45, 32, "FoodError", 36, 1, 11, 40, 16, -36, 0, 32, "message", 45, 32, "Problem with your food: ", 2, "concat", 2, 2, "print", 1, 35, 39, 25, 32, "FishError", -36, 1, 11, 40, 16, 36, 0, 32, "message", 45, 32, "Problem with your fish: ", 2, "concat", 2, 2, "print", 1, 35, 39, 2, -35, 49, 35, 35, 50, 11, 31, 32, "Your fish are hungry", 2, "NotImplementedError", 2, 49, 51, 39, 45, 36, 0, 32, "type", -45, 32, "FoodError", 36, 1, 11, 40, 16, 36, 0, 32, "message", 45, 32, "Problem with your food: ", 2, "concat", 2, 2, -"print", 1, 35, 39, 15, 36, 0, 32, "Unknown problem: ", 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35] +["_H", 1, 41, "FishError", 1, 9, 32, "FishError", 36, 0, 31, 2, "HogError", 3, 38, 41, "FoodError", 1, 9, 32, +"FoodError", 36, 0, 31, 2, "HogError", 3, 38, 50, 10, 32, "You forgot to feed your fish", 2, "FishError", 1, 49, 51, 39, +55, 36, 0, 32, "type", 45, 32, "FoodError", 36, 1, 11, 40, 16, 32, "Problem with your food: ", 36, 0, 32, "message", 45, +2, "concat", 2, 2, "print", 1, 35, 39, 25, 32, "FishError", 36, 1, 11, 40, 16, 32, "Problem with your fish: ", 36, 0, +32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 50, 10, 32, "Your fish are hungry", 2, +"FoodError", 1, 49, 51, 39, 55, 36, 0, 32, "type", 45, 32, "FoodError", 36, 1, 11, 40, 16, 32, +"Problem with your food: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 25, 32, "FishError", 36, 1, +11, 40, 16, 32, "Problem with your fish: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, +35, 35, 50, 11, 32, "Your fish are hungry", 
31, 2, "NotImplementedError", 2, 49, 51, 39, 45, 36, 0, 32, "type", 45, 32, +"FoodError", 36, 1, 11, 40, 16, 32, "Problem with your food: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 1, +35, 39, 15, 32, "Unknown problem: ", 36, 0, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/catch2.hoge b/hogvm/__tests__/__snapshots__/catch2.hoge index 52aa53597834c..d99e3cd75ae3a 100644 --- a/hogvm/__tests__/__snapshots__/catch2.hoge +++ b/hogvm/__tests__/__snapshots__/catch2.hoge @@ -1,15 +1,15 @@ -["_h", 50, 50, 50, 13, 31, 32, "You forgot to feed your fish", 32, "FishError", 2, "HogError", 3, 49, 51, 39, 32, 36, 0, -32, "type", 45, 32, "FoodError", 36, 1, 11, 40, 16, 36, 0, 32, "message", 45, 32, "Problem with your food: ", 2, +["_H", 1, 50, 50, 50, 13, 32, "FishError", 32, "You forgot to feed your fish", 31, 2, "HogError", 3, 49, 51, 39, 32, 36, +0, 32, "type", 45, 32, "FoodError", 36, 1, 11, 40, 16, 32, "Problem with your food: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 51, 39, 48, 36, 0, 32, "type", 45, 32, "FishError", 36, 1, 11, -40, 16, 36, 0, 32, "message", 45, 32, "FishError: ", 2, "concat", 2, 2, "print", 1, 35, 39, 18, 36, 0, 32, "message", -45, 32, "Error: ", 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 50, 50, 50, 13, 31, 32, -"You forgot to feed your fish", 32, "FunkyError", 2, "HogError", 3, 49, 51, 39, 32, 36, 0, 32, "type", 45, 32, -"FoodError", 36, 1, 11, 40, 16, 36, 0, 32, "message", 45, 32, "Problem with your food: ", 2, "concat", 2, 2, "print", 1, -35, 39, 2, 35, 49, 35, 35, 51, 39, 55, 36, 0, 32, "type", 45, 32, "FishError", 36, 1, 11, 40, 16, 36, 0, 32, "message", -45, 32, "FishError: ", 2, "concat", 2, 2, "print", 1, 35, 39, 25, 36, 0, 32, "message", 45, 32, ": ", 36, 0, 32, "name", -45, 32, "Error of type ", 2, "concat", 4, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 50, 50, 50, 13, 31, 32, -"You forgot to feed your fish", 32, "FishError", 2, "HogError", 3, 49, 51, 39, 32, 36, 0, 32, "type", 45, 32, -"FoodError", 36, 1, 11, 40, 16, 36, 0, 32, "message", 45, 32, "Problem with your food: ", 2, "concat", 2, 2, "print", 1, -35, 39, 2, 35, 49, 35, 35, 51, 39, 55, 36, 0, 32, "type", 45, 36, 0, 32, "message", 45, 32, ": ", 36, 0, 32, "name", 45, -32, "Error of type ", 2, "concat", 4, 2, "print", 1, 35, 39, 25, 32, "FishError", 36, 1, 11, 40, 16, 36, 0, 32, -"message", 45, 32, "FishError: ", 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35] +40, 16, 32, "FishError: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 18, 32, "Error: ", 36, 0, +32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 50, 50, 50, 13, 32, "FunkyError", 32, +"You forgot to feed your fish", 31, 2, "HogError", 3, 49, 51, 39, 32, 36, 0, 32, "type", 45, 32, "FoodError", 36, 1, 11, +40, 16, 32, "Problem with your food: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, +35, 51, 39, 55, 36, 0, 32, "type", 45, 32, "FishError", 36, 1, 11, 40, 16, 32, "FishError: ", 36, 0, 32, "message", 45, +2, "concat", 2, 2, "print", 1, 35, 39, 25, 32, "Error of type ", 36, 0, 32, "name", 45, 32, ": ", 36, 0, 32, "message", +45, 2, "concat", 4, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 50, 50, 50, 13, 32, "FishError", 32, +"You forgot to feed your fish", 31, 2, "HogError", 3, 49, 51, 39, 32, 36, 0, 32, "type", 45, 32, "FoodError", 36, 1, 11, +40, 16, 32, "Problem with your food: ", 36, 0, 32, "message", 45, 2, "concat", 2, 2, "print", 
1, 35, 39, 2, 35, 49, 35, +35, 51, 39, 55, 36, 0, 32, "type", 45, 32, "Error of type ", 36, 0, 32, "name", 45, 32, ": ", 36, 0, 32, "message", 45, +2, "concat", 4, 2, "print", 1, 35, 39, 25, 32, "FishError", 36, 1, 11, 40, 16, 32, "FishError: ", 36, 0, 32, "message", +45, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/crypto.hoge b/hogvm/__tests__/__snapshots__/crypto.hoge index 5100cc860009a..e273437821c50 100644 --- a/hogvm/__tests__/__snapshots__/crypto.hoge +++ b/hogvm/__tests__/__snapshots__/crypto.hoge @@ -1,4 +1,4 @@ -["_h", 32, "this is a secure string", 36, 0, 32, "string:", 2, "print", 2, 35, 36, 0, 2, "md5Hex", 1, 32, -"md5Hex(string):", 2, "print", 2, 35, 36, 0, 2, "sha256Hex", 1, 32, "sha256Hex(string):", 2, "print", 2, 35, 32, "1", -32, "string", 32, "more", 32, "keys", 43, 4, 36, 1, 32, "data:", 2, "print", 2, 35, 36, 1, 2, "sha256HmacChainHex", 1, -32, "sha256HmacChainHex(data):", 2, "print", 2, 35, 35, 35] +["_H", 1, 32, "this is a secure string", 32, "string:", 36, 0, 2, "print", 2, 35, 32, "md5Hex(string):", 36, 0, 2, +"md5Hex", 1, 2, "print", 2, 35, 32, "sha256Hex(string):", 36, 0, 2, "sha256Hex", 1, 2, "print", 2, 35, 32, "1", 32, +"string", 32, "more", 32, "keys", 43, 4, 32, "data:", 36, 1, 2, "print", 2, 35, 32, "sha256HmacChainHex(data):", 36, 1, +2, "sha256HmacChainHex", 1, 2, "print", 2, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/date.hoge b/hogvm/__tests__/__snapshots__/date.hoge index a655406038a05..ab77e9efa77b3 100644 --- a/hogvm/__tests__/__snapshots__/date.hoge +++ b/hogvm/__tests__/__snapshots__/date.hoge @@ -1,28 +1,28 @@ -["_h", 33, 1234334543, 2, "fromUnixTimestamp", 1, 36, 0, 2, "print", 1, 35, 36, 0, 2, "toString", 1, 2, "print", 1, 35, -36, 0, 2, "toUnixTimestamp", 1, 2, "toInt", 1, 2, "print", 1, 35, 32, "-", 2, "print", 1, 35, 32, "2024-05-03", 2, +["_H", 1, 33, 1234334543, 2, "fromUnixTimestamp", 1, 36, 0, 2, "print", 1, 35, 36, 0, 2, "toString", 1, 2, "print", 1, +35, 36, 0, 2, "toUnixTimestamp", 1, 2, "toInt", 1, 2, "print", 1, 35, 32, "-", 2, "print", 1, 35, 32, "2024-05-03", 2, "toDate", 1, 36, 1, 2, "print", 1, 35, 36, 1, 2, "toString", 1, 2, "print", 1, 35, 36, 1, 2, "toUnixTimestamp", 1, 2, "toInt", 1, 2, "print", 1, 35, 32, "-", 2, "print", 1, 35, 32, "2024-05-03T12:34:56Z", 2, "toDateTime", 1, 36, 2, 2, "print", 1, 35, 36, 2, 2, "toString", 1, 2, "print", 1, 35, 36, 2, 2, "toUnixTimestamp", 1, 2, "toInt", 1, 2, "print", -1, 35, 32, "------", 2, "print", 1, 35, 32, "Europe/Brussels", 36, 2, 2, "toTimeZone", 2, 2, "print", 1, 35, 32, -"Europe/Brussels", 36, 2, 2, "toTimeZone", 2, 2, "toString", 1, 2, "print", 1, 35, 32, "-", 2, "print", 1, 35, 32, -"Europe/Tallinn", 36, 2, 2, "toTimeZone", 2, 2, "print", 1, 35, 32, "Europe/Tallinn", 36, 2, 2, "toTimeZone", 2, 2, -"toString", 1, 2, "print", 1, 35, 32, "-", 2, "print", 1, 35, 32, "America/New_York", 36, 2, 2, "toTimeZone", 2, 2, -"print", 1, 35, 32, "America/New_York", 36, 2, 2, "toTimeZone", 2, 2, "toString", 1, 2, "print", 1, 35, 32, "------", 2, -"print", 1, 35, 34, 1234334543.123, 2, "fromUnixTimestamp", 1, 36, 3, 32, "timestamp: ", -2, "print", 2, 35, 36, 3, 2, "toString", 1, 32, "toString(timestamp): ", 2, "print", 2, 35, 36, 3, -2, "toInt", 1, 32, "toInt(timestamp): ", 2, "print", 2, 35, 36, 3, 2, "toInt", 1, 2, -"toDateTime", 1, 32, "toDateTime(toInt(timestamp)): ", 2, "print", 2, 35, 36, 3, 2, "toInt", 1, 2, -"toDateTime", 1, 2, "toInt", 1, 32, "toInt(toDateTime(toInt(timestamp))): ", 2, "print", 2, 35, 36, 3, 2, 
"toInt", -1, 2, "toDateTime", 1, 2, "toString", 1, 32, "toString(toDateTime(toInt(timestamp))): ", 2, "print", 2, 35, 36, 3, 2, -"toFloat", 1, 32, "toFloat(timestamp): ", 2, "print", 2, 35, 36, 3, 2, "toFloat", 1, 2, -"toDateTime", 1, 32, "toDateTime(toFloat(timestamp)): ", 2, "print", 2, 35, 36, 3, 2, "toFloat", 1, 2, -"toDateTime", 1, 2, "toFloat", 1, 32, "toFloat(toDateTime(toFloat(timestamp))): ", 2, "print", 2, 35, 36, 3, 2, -"toFloat", 1, 2, "toDateTime", 1, 2, "toString", 1, 32, "toString(toDateTime(toFloat(timestamp))): ", 2, "print", 2, 35, -32, "------", 2, "print", 1, 35, 33, 1234334543123, 2, "fromUnixTimestampMilli", 1, 36, 4, 2, "toString", 1, 32, -"millisTs: ", 2, "print", 2, 35, 36, 4, 2, "toString", 1, 32, -"toString(millisTs): ", 2, "print", 2, 35, 36, 4, 2, "toInt", 1, 32, -"toInt(millisTs): ", 2, "print", 2, 35, 36, 4, 2, "toFloat", 1, 32, -"toFloat(millisTs): ", 2, "print", 2, 35, 36, 4, 2, "toUnixTimestampMilli", 1, 32, -"toUnixTimestampMilli(millisTs): ", 2, "print", 2, 35, 32, "------", 2, "print", 1, 35, 32, "2024-05-03", 2, -"toDate", 1, 36, 5, 2, "print", 1, 35, 36, 5, 2, "toString", 1, 2, "print", 1, 35, 36, 5, 2, "toInt", 1, 2, "print", 1, -35, 35, 35, 35, 35, 35, 35] +1, 35, 32, "------", 2, "print", 1, 35, 36, 2, 32, "Europe/Brussels", 2, "toTimeZone", 2, 2, "print", 1, 35, 36, 2, 32, +"Europe/Brussels", 2, "toTimeZone", 2, 2, "toString", 1, 2, "print", 1, 35, 32, "-", 2, "print", 1, 35, 36, 2, 32, +"Europe/Tallinn", 2, "toTimeZone", 2, 2, "print", 1, 35, 36, 2, 32, "Europe/Tallinn", 2, "toTimeZone", 2, 2, "toString", +1, 2, "print", 1, 35, 32, "-", 2, "print", 1, 35, 36, 2, 32, "America/New_York", 2, "toTimeZone", 2, 2, "print", 1, 35, +36, 2, 32, "America/New_York", 2, "toTimeZone", 2, 2, "toString", 1, 2, "print", 1, 35, 32, "------", 2, "print", 1, 35, +34, 1234334543.123, 2, "fromUnixTimestamp", 1, 32, "timestamp: ", 36, 3, 2, "print", 2, +35, 32, "toString(timestamp): ", 36, 3, 2, "toString", 1, 2, "print", 2, 35, 32, +"toInt(timestamp): ", 36, 3, 2, "toInt", 1, 2, "print", 2, 35, 32, +"toDateTime(toInt(timestamp)): ", 36, 3, 2, "toInt", 1, 2, "toDateTime", 1, 2, "print", 2, 35, 32, +"toInt(toDateTime(toInt(timestamp))): ", 36, 3, 2, "toInt", 1, 2, "toDateTime", 1, 2, "toInt", 1, 2, "print", 2, +35, 32, "toString(toDateTime(toInt(timestamp))): ", 36, 3, 2, "toInt", 1, 2, "toDateTime", 1, 2, "toString", 1, 2, +"print", 2, 35, 32, "toFloat(timestamp): ", 36, 3, 2, "toFloat", 1, 2, "print", 2, 35, 32, +"toDateTime(toFloat(timestamp)): ", 36, 3, 2, "toFloat", 1, 2, "toDateTime", 1, 2, "print", 2, 35, 32, +"toFloat(toDateTime(toFloat(timestamp))): ", 36, 3, 2, "toFloat", 1, 2, "toDateTime", 1, 2, "toFloat", 1, 2, "print", +2, 35, 32, "toString(toDateTime(toFloat(timestamp))): ", 36, 3, 2, "toFloat", 1, 2, "toDateTime", 1, 2, "toString", 1, +2, "print", 2, 35, 32, "------", 2, "print", 1, 35, 33, 1234334543123, 2, "fromUnixTimestampMilli", 1, 32, +"millisTs: ", 36, 4, 2, "toString", 1, 2, "print", 2, 35, 32, +"toString(millisTs): ", 36, 4, 2, "toString", 1, 2, "print", 2, 35, 32, +"toInt(millisTs): ", 36, 4, 2, "toInt", 1, 2, "print", 2, 35, 32, +"toFloat(millisTs): ", 36, 4, 2, "toFloat", 1, 2, "print", 2, 35, 32, +"toUnixTimestampMilli(millisTs): ", 36, 4, 2, "toUnixTimestampMilli", 1, 2, "print", 2, 35, 32, "------", 2, +"print", 1, 35, 32, "2024-05-03", 2, "toDate", 1, 36, 5, 2, "print", 1, 35, 36, 5, 2, "toString", 1, 2, "print", 1, 35, +36, 5, 2, "toInt", 1, 2, "print", 1, 35, 35, 35, 35, 35, 35, 35] diff --git 
a/hogvm/__tests__/__snapshots__/dateFormat.hoge b/hogvm/__tests__/__snapshots__/dateFormat.hoge index eea4f091c7fbd..98706577e6cb1 100644 --- a/hogvm/__tests__/__snapshots__/dateFormat.hoge +++ b/hogvm/__tests__/__snapshots__/dateFormat.hoge @@ -1,32 +1,32 @@ -["_h", 34, 1234377543.123456, 2, "fromUnixTimestamp", 1, 32, "%Y-%m-%d %H:%i:%S", 36, 0, 2, "formatDateTime", 2, 2, -"print", 1, 35, 32, "Europe/Brussels", 32, "%Y-%m-%d %H:%i:%S", 36, 0, 2, "formatDateTime", 3, 2, "print", 1, 35, 32, -"America/New_York", 32, "%Y-%m-%d %H:%i:%S", 36, 0, 2, "formatDateTime", 3, 2, "print", 1, 35, 32, "%Y%m%dT%H%i%sZ", 36, -0, 2, "formatDateTime", 2, 2, "print", 1, 35, 32, "-----", 2, "print", 1, 35, 32, "%a", 36, 0, 2, "formatDateTime", 2, -32, "%a: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%b", 36, 0, 2, "formatDateTime", 2, 32, "%b: ", 2, "concat", 2, 2, -"print", 1, 35, 32, "%c", 36, 0, 2, "formatDateTime", 2, 32, "%c: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%C", 36, 0, -2, "formatDateTime", 2, 32, "%C: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%d", 36, 0, 2, "formatDateTime", 2, 32, -"%d: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%D", 36, 0, 2, "formatDateTime", 2, 32, "%D: ", 2, "concat", 2, 2, -"print", 1, 35, 32, "%e", 36, 0, 2, "formatDateTime", 2, 32, "%e: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%F", 36, 0, -2, "formatDateTime", 2, 32, "%F: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%g", 36, 0, 2, "formatDateTime", 2, 32, -"%g: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%G", 36, 0, 2, "formatDateTime", 2, 32, "%G: ", 2, "concat", 2, 2, -"print", 1, 35, 32, "%h", 36, 0, 2, "formatDateTime", 2, 32, "%h: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%H", 36, 0, -2, "formatDateTime", 2, 32, "%H: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%i", 36, 0, 2, "formatDateTime", 2, 32, -"%i: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%I", 36, 0, 2, "formatDateTime", 2, 32, "%I: ", 2, "concat", 2, 2, -"print", 1, 35, 32, "%j", 36, 0, 2, "formatDateTime", 2, 32, "%j: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%k", 36, 0, -2, "formatDateTime", 2, 32, "%k: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%l", 36, 0, 2, "formatDateTime", 2, 32, -"%l: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%m", 36, 0, 2, "formatDateTime", 2, 32, "%m: ", 2, "concat", 2, 2, -"print", 1, 35, 32, "%M", 36, 0, 2, "formatDateTime", 2, 32, "%M: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%n", 36, 0, -2, "formatDateTime", 2, 32, "%n: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%p", 36, 0, 2, "formatDateTime", 2, 32, -"%p: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%r", 36, 0, 2, "formatDateTime", 2, 32, "%r: ", 2, "concat", 2, 2, -"print", 1, 35, 32, "%R", 36, 0, 2, "formatDateTime", 2, 32, "%R: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%s", 36, 0, -2, "formatDateTime", 2, 32, "%s: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%S", 36, 0, 2, "formatDateTime", 2, 32, -"%S: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%t", 36, 0, 2, "formatDateTime", 2, 32, "%t: ", 2, "concat", 2, 2, -"print", 1, 35, 32, "%T", 36, 0, 2, "formatDateTime", 2, 32, "%T: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%u", 36, 0, -2, "formatDateTime", 2, 32, "%u: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%V", 36, 0, 2, "formatDateTime", 2, 32, -"%V: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%w", 36, 0, 2, "formatDateTime", 2, 32, "%w: ", 2, "concat", 2, 2, -"print", 1, 35, 32, "%W", 36, 0, 2, "formatDateTime", 2, 32, "%W: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%y", 36, 0, -2, "formatDateTime", 2, 32, "%y: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%Y", 36, 0, 
2, "formatDateTime", 2, 32, -"%Y: ", 2, "concat", 2, 2, "print", 1, 35, 32, "%z", 36, 0, 2, "formatDateTime", 2, 32, "%z: ", 2, "concat", 2, 2, -"print", 1, 35, 32, "%%", 36, 0, 2, "formatDateTime", 2, 32, "%%: ", 2, "concat", 2, 2, "print", 1, 35, 32, "-----", 2, -"print", 1, 35, 32, "one banana", 36, 0, 2, "formatDateTime", 2, 2, "print", 1, 35, 32, -"%Y no way %m is this %d a %H real %i time %S", 36, 0, 2, "formatDateTime", 2, 2, "print", 1, 35, 35] +["_H", 1, 34, 1234377543.123456, 2, "fromUnixTimestamp", 1, 36, 0, 32, "%Y-%m-%d %H:%i:%S", 2, "formatDateTime", 2, 2, +"print", 1, 35, 36, 0, 32, "%Y-%m-%d %H:%i:%S", 32, "Europe/Brussels", 2, "formatDateTime", 3, 2, "print", 1, 35, 36, 0, +32, "%Y-%m-%d %H:%i:%S", 32, "America/New_York", 2, "formatDateTime", 3, 2, "print", 1, 35, 36, 0, 32, "%Y%m%dT%H%i%sZ", +2, "formatDateTime", 2, 2, "print", 1, 35, 32, "-----", 2, "print", 1, 35, 32, "%a: ", 36, 0, 32, "%a", 2, +"formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%b: ", 36, 0, 32, "%b", 2, "formatDateTime", 2, 2, +"concat", 2, 2, "print", 1, 35, 32, "%c: ", 36, 0, 32, "%c", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, +32, "%C: ", 36, 0, 32, "%C", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%d: ", 36, 0, 32, "%d", 2, +"formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%D: ", 36, 0, 32, "%D", 2, "formatDateTime", 2, 2, +"concat", 2, 2, "print", 1, 35, 32, "%e: ", 36, 0, 32, "%e", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, +32, "%F: ", 36, 0, 32, "%F", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%g: ", 36, 0, 32, "%g", 2, +"formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%G: ", 36, 0, 32, "%G", 2, "formatDateTime", 2, 2, +"concat", 2, 2, "print", 1, 35, 32, "%h: ", 36, 0, 32, "%h", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, +32, "%H: ", 36, 0, 32, "%H", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%i: ", 36, 0, 32, "%i", 2, +"formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%I: ", 36, 0, 32, "%I", 2, "formatDateTime", 2, 2, +"concat", 2, 2, "print", 1, 35, 32, "%j: ", 36, 0, 32, "%j", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, +32, "%k: ", 36, 0, 32, "%k", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%l: ", 36, 0, 32, "%l", 2, +"formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%m: ", 36, 0, 32, "%m", 2, "formatDateTime", 2, 2, +"concat", 2, 2, "print", 1, 35, 32, "%M: ", 36, 0, 32, "%M", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, +32, "%n: ", 36, 0, 32, "%n", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%p: ", 36, 0, 32, "%p", 2, +"formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%r: ", 36, 0, 32, "%r", 2, "formatDateTime", 2, 2, +"concat", 2, 2, "print", 1, 35, 32, "%R: ", 36, 0, 32, "%R", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, +32, "%s: ", 36, 0, 32, "%s", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%S: ", 36, 0, 32, "%S", 2, +"formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%t: ", 36, 0, 32, "%t", 2, "formatDateTime", 2, 2, +"concat", 2, 2, "print", 1, 35, 32, "%T: ", 36, 0, 32, "%T", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, +32, "%u: ", 36, 0, 32, "%u", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%V: ", 36, 0, 32, "%V", 2, +"formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%w: ", 36, 0, 32, "%w", 2, "formatDateTime", 2, 2, +"concat", 2, 2, "print", 
1, 35, 32, "%W: ", 36, 0, 32, "%W", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, +32, "%y: ", 36, 0, 32, "%y", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%Y: ", 36, 0, 32, "%Y", 2, +"formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, 32, "%z: ", 36, 0, 32, "%z", 2, "formatDateTime", 2, 2, +"concat", 2, 2, "print", 1, 35, 32, "%%: ", 36, 0, 32, "%%", 2, "formatDateTime", 2, 2, "concat", 2, 2, "print", 1, 35, +32, "-----", 2, "print", 1, 35, 36, 0, 32, "one banana", 2, "formatDateTime", 2, 2, "print", 1, 35, 36, 0, 32, +"%Y no way %m is this %d a %H real %i time %S", 2, "formatDateTime", 2, 2, "print", 1, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/dicts.hoge b/hogvm/__tests__/__snapshots__/dicts.hoge index 91aee718f176b..f0c4895e60b94 100644 --- a/hogvm/__tests__/__snapshots__/dicts.hoge +++ b/hogvm/__tests__/__snapshots__/dicts.hoge @@ -1,6 +1,6 @@ -["_h", 42, 0, 2, "print", 1, 35, 32, "key", 32, "value", 42, 1, 2, "print", 1, 35, 32, "key", 32, "value", 32, "other", -32, "thing", 42, 2, 2, "print", 1, 35, 32, "key", 32, "otherKey", 32, "value", 42, 1, 42, 1, 2, "print", 1, 35, 32, -"key", 1, 1, 32, "value", 42, 1, 2, "print", 1, 35, 33, 3, 36, 0, 32, "value", 42, 1, 2, "print", 1, 35, 32, "key", 32, -"value", 42, 1, 32, "key", 45, 2, "print", 1, 35, 32, "key", 32, "value", 42, 1, 32, "key", 45, 2, "print", 1, 35, 32, -"key", 32, "otherKey", 32, "value", 42, 1, 42, 1, 32, "key", 45, 32, "otherKey", 45, 2, "print", 1, 35, 32, "key", 32, -"otherKey", 32, "value", 42, 1, 42, 1, 32, "key", 45, 32, "otherKey", 45, 2, "print", 1, 35, 35] +["_H", 1, 42, 0, 2, "print", 1, 35, 32, "key", 32, "value", 42, 1, 2, "print", 1, 35, 32, "key", 32, "value", 32, +"other", 32, "thing", 42, 2, 2, "print", 1, 35, 32, "key", 32, "otherKey", 32, "value", 42, 1, 42, 1, 2, "print", 1, 35, +33, 3, 36, 0, 32, "value", 42, 1, 2, "print", 1, 35, 32, "key", 32, "value", 42, 1, 32, "key", 45, 2, "print", 1, 35, +32, "key", 32, "value", 42, 1, 32, "key", 45, 2, "print", 1, 35, 32, "key", 32, "otherKey", 32, "value", 42, 1, 42, 1, +32, "key", 45, 32, "otherKey", 45, 2, "print", 1, 35, 32, "key", 32, "otherKey", 32, "value", 42, 1, 42, 1, 32, "key", +45, 32, "otherKey", 45, 2, "print", 1, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/dicts.stdout b/hogvm/__tests__/__snapshots__/dicts.stdout index 6a8cdfef1b6eb..33e60af57d4e5 100644 --- a/hogvm/__tests__/__snapshots__/dicts.stdout +++ b/hogvm/__tests__/__snapshots__/dicts.stdout @@ -2,7 +2,6 @@ {'key': 'value'} {'key': 'value', 'other': 'thing'} {'key': {'otherKey': 'value'}} -{null: 'value'} {3: 'value'} value value diff --git a/hogvm/__tests__/__snapshots__/exceptions.hoge b/hogvm/__tests__/__snapshots__/exceptions.hoge index 41aa587028ab3..a4a5b1d16a4b2 100644 --- a/hogvm/__tests__/__snapshots__/exceptions.hoge +++ b/hogvm/__tests__/__snapshots__/exceptions.hoge @@ -1,14 +1,14 @@ -["_h", 32, "start", 2, "print", 1, 35, 50, 10, 32, "try", 2, "print", 1, 35, 51, 39, 22, 36, 0, 32, "type", 45, 32, -" was the exception", 36, 0, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 32, "------------------", 2, +["_H", 1, 32, "start", 2, "print", 1, 35, 50, 10, 32, "try", 2, "print", 1, 35, 51, 39, 22, 36, 0, 32, "type", 45, 36, +0, 32, " was the exception", 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 32, "------------------", 2, "print", 1, 35, 32, "start", 2, "print", 1, 35, 50, 10, 32, "try", 2, "print", 1, 35, 51, 39, 17, 36, 0, 32, "type", 45, 32, "No var for error, but no error", 2, "print", 1, 35, 39, 
2, 35, 49, 35, 35, 32, "------------------", 2, "print", 1, -35, 50, 16, 32, "try again", 2, "print", 1, 35, 31, 31, 2, "Error", 2, 49, 51, 39, 22, 36, 0, 32, "type", 45, 32, -" was the exception", 36, 0, 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 32, "------------------", 2, -"print", 1, 35, 50, 16, 32, "try again", 2, "print", 1, 35, 31, 31, 2, "Error", 2, 49, 51, 39, 17, 36, 0, 32, "type", -45, 32, "No var for error", 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 32, "------------------", 2, "print", 1, 35, 41, -"third", 0, 15, 32, "Throwing in third", 2, "print", 1, 35, 31, 32, "Threw in third", 2, "Error", 2, 49, 31, 38, 41, -"second", 0, 12, 32, "second", 2, "print", 1, 35, 2, "third", 0, 35, 31, 38, 41, "first", 0, 12, 32, "first", 2, -"print", 1, 35, 2, "second", 0, 35, 31, 38, 41, "base", 0, 42, 32, "base", 2, "print", 1, 35, 50, 8, 2, "first", 0, 35, -51, 39, 25, 36, 0, 32, "type", 45, 36, 0, 32, "Caught in base: ", 2, "concat", 2, 2, "print", 1, 35, 36, 0, 49, 39, 2, -35, 49, 35, 35, 31, 38, 50, 8, 2, "base", 0, 35, 51, 39, 22, 36, 0, 32, "type", 45, 36, 0, 32, "Caught in root: ", 2, -"concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 32, "The end", 2, "print", 1, 35] +35, 50, 16, 32, "try again", 2, "print", 1, 35, 31, 31, 2, "Error", 2, 49, 51, 39, 22, 36, 0, 32, "type", 45, 36, 0, 32, +" was the exception", 2, "concat", 2, 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 32, "------------------", 2, "print", 1, +35, 50, 16, 32, "try again", 2, "print", 1, 35, 31, 31, 2, "Error", 2, 49, 51, 39, 17, 36, 0, 32, "type", 45, 32, +"No var for error", 2, "print", 1, 35, 39, 2, 35, 49, 35, 35, 32, "------------------", 2, "print", 1, 35, 41, "third", +0, 15, 32, "Throwing in third", 2, "print", 1, 35, 32, "Threw in third", 31, 2, "Error", 2, 49, 31, 38, 41, "second", 0, +12, 32, "second", 2, "print", 1, 35, 2, "third", 0, 35, 31, 38, 41, "first", 0, 12, 32, "first", 2, "print", 1, 35, 2, +"second", 0, 35, 31, 38, 41, "base", 0, 42, 32, "base", 2, "print", 1, 35, 50, 8, 2, "first", 0, 35, 51, 39, 25, 36, 0, +32, "type", 45, 32, "Caught in base: ", 36, 0, 2, "concat", 2, 2, "print", 1, 35, 36, 0, 49, 39, 2, 35, 49, 35, 35, 31, +38, 50, 8, 2, "base", 0, 35, 51, 39, 22, 36, 0, 32, "type", 45, 32, "Caught in root: ", 36, 0, 2, "concat", 2, 2, +"print", 1, 35, 39, 2, 35, 49, 35, 35, 32, "The end", 2, "print", 1, 35] diff --git a/hogvm/__tests__/__snapshots__/functions.hoge b/hogvm/__tests__/__snapshots__/functions.hoge index aa61602e4a1c7..0351cb7ce179d 100644 --- a/hogvm/__tests__/__snapshots__/functions.hoge +++ b/hogvm/__tests__/__snapshots__/functions.hoge @@ -1,14 +1,14 @@ -["_h", 32, "-- test functions --", 2, "print", 1, 35, 41, "add", 2, 6, 36, 0, 36, 1, 6, 38, 41, "add2", 2, 9, 36, 0, 36, -1, 6, 36, 2, 38, 35, 41, "mult", 2, 6, 36, 0, 36, 1, 8, 38, 41, "noArgs", 0, 12, 32, "basdfasdf", 33, 3, 33, 2, 6, 36, -1, 38, 35, 35, 41, "empty", 0, 2, 31, 38, 41, "empty2", 0, 2, 31, 38, 41, "empty3", 0, 2, 31, 38, 41, "noReturn", 0, 14, -33, 1, 33, 2, 36, 1, 36, 0, 6, 31, 38, 35, 35, 35, 41, "emptyReturn", 0, 2, 31, 38, 41, "emptyReturnBeforeOtherStuff", -0, 10, 31, 38, 33, 2, 33, 2, 6, 35, 31, 38, 41, "emptyReturnBeforeOtherStuffNoSemicolon", 0, 6, 33, 2, 33, 2, 6, 38, 41, -"ifThenReturn", 0, 8, 30, 40, 2, 31, 38, 33, 4, 38, 33, 4, 33, 3, 2, "add", 2, 2, "print", 1, 35, 33, 1, 33, 1, 2, -"add", 2, 33, 100, 33, 4, 33, 3, 2, "add", 2, 6, 6, 2, "print", 1, 35, 2, "noArgs", 0, 47, 3, 35, 33, -1, 2, "print", 1, -35, 2, "empty", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 2, "empty2", 0, 47, 
3, 35, 33, -1, 2, "print", 1, 35, 2, -"empty3", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 2, "noReturn", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 2, -"emptyReturn", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 2, "emptyReturnBeforeOtherStuff", 0, 47, 3, 35, 33, -1, 2, -"print", 1, 35, 2, "emptyReturnBeforeOtherStuffNoSemicolon", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 2, "ifThenReturn", -0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 33, 2, 33, 1, 33, 2, 2, "add", 2, 33, 100, 33, 4, 33, 3, 2, "add", 2, 6, 6, 2, -"mult", 2, 2, "print", 1, 35, 33, 10, 33, 1, 33, 2, 2, "add2", 2, 33, 100, 33, 4, 33, 3, 2, "add2", 2, 6, 6, 2, "mult", -2, 2, "print", 1, 35] +["_H", 1, 32, "-- test functions --", 2, "print", 1, 35, 41, "add", 2, 6, 36, 1, 36, 0, 6, 38, 41, "add2", 2, 9, 36, 1, +36, 0, 6, 36, 2, 38, 35, 41, "mult", 2, 6, 36, 1, 36, 0, 8, 38, 41, "noArgs", 0, 12, 32, "basdfasdf", 33, 3, 33, 2, 6, +36, 1, 38, 35, 35, 41, "empty", 0, 2, 31, 38, 41, "empty2", 0, 2, 31, 38, 41, "empty3", 0, 2, 31, 38, 41, "noReturn", 0, +14, 33, 1, 33, 2, 36, 1, 36, 0, 6, 31, 38, 35, 35, 35, 41, "emptyReturn", 0, 2, 31, 38, 41, +"emptyReturnBeforeOtherStuff", 0, 10, 31, 38, 33, 2, 33, 2, 6, 35, 31, 38, 41, "emptyReturnBeforeOtherStuffNoSemicolon", +0, 6, 33, 2, 33, 2, 6, 38, 41, "ifThenReturn", 0, 8, 30, 40, 2, 31, 38, 33, 4, 38, 33, 3, 33, 4, 2, "add", 2, 2, +"print", 1, 35, 33, 1, 33, 1, 2, "add", 2, 33, 100, 33, 3, 33, 4, 2, "add", 2, 6, 6, 2, "print", 1, 35, 2, "noArgs", 0, +47, 3, 35, 33, -1, 2, "print", 1, 35, 2, "empty", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 2, "empty2", 0, 47, 3, 35, +33, -1, 2, "print", 1, 35, 2, "empty3", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 2, "noReturn", 0, 47, 3, 35, 33, -1, 2, +"print", 1, 35, 2, "emptyReturn", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 2, "emptyReturnBeforeOtherStuff", 0, 47, 3, +35, 33, -1, 2, "print", 1, 35, 2, "emptyReturnBeforeOtherStuffNoSemicolon", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 2, +"ifThenReturn", 0, 47, 3, 35, 33, -1, 2, "print", 1, 35, 33, 2, 33, 1, 2, "add", 2, 33, 100, 33, 3, 33, 4, 2, "add", 2, +6, 6, 33, 2, 2, "mult", 2, 2, "print", 1, 35, 33, 2, 33, 1, 2, "add2", 2, 33, 100, 33, 3, 33, 4, 2, "add2", 2, 6, 6, 33, +10, 2, "mult", 2, 2, "print", 1, 35] diff --git a/hogvm/__tests__/__snapshots__/ifElse.hoge b/hogvm/__tests__/__snapshots__/ifElse.hoge index 0aaef24a74f80..b5deffdd6f5f7 100644 --- a/hogvm/__tests__/__snapshots__/ifElse.hoge +++ b/hogvm/__tests__/__snapshots__/ifElse.hoge @@ -1,4 +1,4 @@ -["_h", 32, "-- test if else --", 2, "print", 1, 35, 29, 40, 8, 33, 1, 2, "print", 1, 35, 39, 6, 33, 2, 2, "print", 1, +["_H", 1, 32, "-- test if else --", 2, "print", 1, 35, 29, 40, 8, 33, 1, 2, "print", 1, 35, 39, 6, 33, 2, 2, "print", 1, 35, 29, 40, 8, 33, 1, 2, "print", 1, 35, 39, 6, 33, 2, 2, "print", 1, 35, 30, 40, 8, 33, 1, 2, "print", 1, 35, 39, 6, 33, 2, 2, "print", 1, 35, 29, 40, 8, 33, 1, 2, "print", 1, 35, 39, 6, 33, 2, 2, "print", 1, 35, 29, 36, 0, 40, 14, 33, 3, 33, 2, 36, 1, 6, 2, "print", 1, 35, 35, 39, 6, 33, 2, 2, "print", 1, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/ifJump.hoge b/hogvm/__tests__/__snapshots__/ifJump.hoge index 0e946f4101f07..f90184dea86fd 100644 --- a/hogvm/__tests__/__snapshots__/ifJump.hoge +++ b/hogvm/__tests__/__snapshots__/ifJump.hoge @@ -1,3 +1,3 @@ -["_h", 42, 0, 36, 0, 32, "email", 45, 32, "", 36, 1, 11, 40, 12, 32, "ERROR - Email not found!", 2, "print", 1, 35, 32, -"3", 2, "print", 1, 35, 32, "1", 2, "print", 1, 35, 32, "", 36, 1, 11, 40, 14, 32, "ERROR - Email not found!", 2, +["_H", 1, 42, 0, 36, 0, 
32, "email", 45, 32, "", 36, 1, 11, 40, 12, 32, "ERROR - Email not found!", 2, "print", 1, 35, +32, "3", 2, "print", 1, 35, 32, "1", 2, "print", 1, 35, 32, "", 36, 1, 11, 40, 14, 32, "ERROR - Email not found!", 2, "print", 1, 35, 32, "3", 2, "print", 1, 35, 39, 6, 32, "else", 2, "print", 1, 35, 32, "1", 2, "print", 1, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/json.hoge b/hogvm/__tests__/__snapshots__/json.hoge index df9948388e519..e23e7a772b6b9 100644 --- a/hogvm/__tests__/__snapshots__/json.hoge +++ b/hogvm/__tests__/__snapshots__/json.hoge @@ -1,3 +1,3 @@ -["_h", 32, "[1,2,3]", 2, "jsonParse", 1, 2, "print", 1, 35, 32, "event", 32, "$pageview", 32, "properties", 32, +["_H", 1, 32, "[1,2,3]", 2, "jsonParse", 1, 2, "print", 1, 35, 32, "event", 32, "$pageview", 32, "properties", 32, "$browser", 32, "Chrome", 32, "$os", 32, "Windows", 42, 2, 42, 2, 36, 0, 2, "jsonStringify", 1, 36, 1, 2, "jsonParse", 1, 2, "print", 1, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/keysValues.hoge b/hogvm/__tests__/__snapshots__/keysValues.hoge index 594b0869a792b..83f13a3825362 100644 --- a/hogvm/__tests__/__snapshots__/keysValues.hoge +++ b/hogvm/__tests__/__snapshots__/keysValues.hoge @@ -1,4 +1,4 @@ -["_h", 33, 3, 33, 4, 33, 5, 43, 3, 33, 3, 33, 4, 33, 5, 44, 3, 32, "key", 32, "value", 32, "other", 32, "val", 42, 2, +["_H", 1, 33, 3, 33, 4, 33, 5, 43, 3, 33, 3, 33, 4, 33, 5, 44, 3, 32, "key", 32, "value", 32, "other", 32, "val", 42, 2, 32, ">> A", 2, "print", 1, 35, 36, 0, 2, "keys", 1, 2, "print", 1, 35, 36, 0, 2, "values", 1, 2, "print", 1, 35, 32, ">> B", 2, "print", 1, 35, 36, 1, 2, "keys", 1, 2, "print", 1, 35, 36, 1, 2, "values", 1, 2, "print", 1, 35, 32, ">> C", 2, "print", 1, 35, 36, 2, 2, "keys", 1, 2, "print", 1, 35, 36, 2, 2, "values", 1, 2, "print", 1, 35, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/loops.hoge b/hogvm/__tests__/__snapshots__/loops.hoge index a3328220d6bd9..51d41fb4ceadd 100644 --- a/hogvm/__tests__/__snapshots__/loops.hoge +++ b/hogvm/__tests__/__snapshots__/loops.hoge @@ -1,22 +1,22 @@ -["_h", 32, "-- test while loop --", 2, "print", 1, 35, 33, 0, 33, 3, 36, 0, 15, 40, 15, 33, 1, 36, 0, 6, 37, 0, 36, 0, -2, "print", 1, 35, 39, -22, 36, 0, 2, "print", 1, 35, 35, 32, "-- test for loop --", 2, "print", 1, 35, 33, 0, 33, 3, -36, 0, 15, 40, 15, 36, 0, 2, "print", 1, 35, 33, 1, 36, 0, 6, 37, 0, 39, -22, 35, 32, "i", 1, 1, 2, "print", 1, 35, 32, -"-- test emptier for loop --", 2, "print", 1, 35, 33, 0, 33, 3, 36, 0, 15, 40, 15, 32, "woo", 2, "print", 1, 35, 33, 1, -36, 0, 6, 37, 0, 39, -22, 32, "hoo", 2, "print", 1, 35, 35, 32, "-- for in loop with arrays --", 2, "print", 1, 35, 33, -1, 33, 2, 33, 3, 43, 3, 36, 0, 36, 1, 2, "values", 1, 33, 1, 36, 2, 2, "length", 1, 31, 36, 4, 36, 3, 16, 40, 22, 36, 2, -36, 3, 45, 37, 5, 36, 5, 2, "print", 1, 35, 36, 3, 33, 1, 6, 37, 3, 39, -29, 35, 35, 35, 35, 35, 35, 32, -"-- for in loop with arrays and keys --", 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 36, 0, 36, 1, 2, "keys", 1, 36, -1, 2, "values", 1, 33, 1, 36, 3, 2, "length", 1, 31, 31, 36, 5, 36, 4, 16, 40, 31, 36, 2, 36, 4, 45, 37, 6, 36, 3, 36, -4, 45, 37, 7, 36, 7, 36, 6, 2, "print", 2, 35, 36, 4, 33, 1, 6, 37, 4, 39, -38, 35, 35, 35, 35, 35, 35, 35, 35, 32, -"-- for in loop with tuples --", 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 44, 3, 36, 0, 36, 1, 2, "values", 1, 33, 1, 36, -2, 2, "length", 1, 31, 36, 4, 36, 3, 16, 40, 22, 36, 2, 36, 3, 45, 37, 5, 36, 5, 2, "print", 1, 35, 36, 3, 33, 1, 6, 37, -3, 39, -29, 35, 35, 35, 35, 35, 35, 32, "-- for in 
loop with tuples and keys --", 2, "print", 1, 35, 33, 1, 33, 2, 33, -3, 44, 3, 36, 0, 36, 1, 2, "keys", 1, 36, 1, 2, "values", 1, 33, 1, 36, 3, 2, "length", 1, 31, 31, 36, 5, 36, 4, 16, 40, -31, 36, 2, 36, 4, 45, 37, 6, 36, 3, 36, 4, 45, 37, 7, 36, 7, 36, 6, 2, "print", 2, 35, 36, 4, 33, 1, 6, 37, 4, 39, -38, -35, 35, 35, 35, 35, 35, 35, 35, 32, "-- for in loop with dicts --", 2, "print", 1, 35, 32, "first", 32, "v1", 32, -"second", 32, "v2", 32, "third", 32, "v3", 42, 3, 36, 0, 36, 1, 2, "values", 1, 33, 1, 36, 2, 2, "length", 1, 31, 36, 4, -36, 3, 16, 40, 22, 36, 2, 36, 3, 45, 37, 5, 36, 5, 2, "print", 1, 35, 36, 3, 33, 1, 6, 37, 3, 39, -29, 35, 35, 35, 35, -35, 35, 32, "-- for in loop with dicts and keys --", 2, "print", 1, 35, 32, "first", 32, "v1", 32, "second", 32, "v2", -32, "third", 32, "v3", 42, 3, 36, 0, 36, 1, 2, "keys", 1, 36, 1, 2, "values", 1, 33, 1, 36, 3, 2, "length", 1, 31, 31, -36, 5, 36, 4, 16, 40, 31, 36, 2, 36, 4, 45, 37, 6, 36, 3, 36, 4, 45, 37, 7, 36, 7, 36, 6, 2, "print", 2, 35, 36, 4, 33, -1, 6, 37, 4, 39, -38, 35, 35, 35, 35, 35, 35, 35, 35] +["_H", 1, 32, "-- test while loop --", 2, "print", 1, 35, 33, 0, 33, 3, 36, 0, 15, 40, 15, 33, 1, 36, 0, 6, 37, 0, 36, +0, 2, "print", 1, 35, 39, -22, 36, 0, 2, "print", 1, 35, 35, 32, "-- test for loop --", 2, "print", 1, 35, 33, 0, 33, 3, +36, 0, 15, 40, 15, 36, 0, 2, "print", 1, 35, 33, 1, 36, 0, 6, 37, 0, 39, -22, 35, 32, "-- test emptier for loop --", 2, +"print", 1, 35, 33, 0, 33, 3, 36, 0, 15, 40, 15, 32, "woo", 2, "print", 1, 35, 33, 1, 36, 0, 6, 37, 0, 39, -22, 32, +"hoo", 2, "print", 1, 35, 35, 32, "-- for in loop with arrays --", 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 36, 0, +36, 1, 2, "values", 1, 33, 1, 36, 2, 2, "length", 1, 31, 36, 4, 36, 3, 16, 40, 22, 36, 2, 36, 3, 45, 37, 5, 36, 5, 2, +"print", 1, 35, 36, 3, 33, 1, 6, 37, 3, 39, -29, 35, 35, 35, 35, 35, 35, 32, "-- for in loop with arrays and keys --", +2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 43, 3, 36, 0, 36, 1, 2, "keys", 1, 36, 1, 2, "values", 1, 33, 1, 36, 3, 2, +"length", 1, 31, 31, 36, 5, 36, 4, 16, 40, 31, 36, 2, 36, 4, 45, 37, 6, 36, 3, 36, 4, 45, 37, 7, 36, 6, 36, 7, 2, +"print", 2, 35, 36, 4, 33, 1, 6, 37, 4, 39, -38, 35, 35, 35, 35, 35, 35, 35, 35, 32, "-- for in loop with tuples --", 2, +"print", 1, 35, 33, 1, 33, 2, 33, 3, 44, 3, 36, 0, 36, 1, 2, "values", 1, 33, 1, 36, 2, 2, "length", 1, 31, 36, 4, 36, +3, 16, 40, 22, 36, 2, 36, 3, 45, 37, 5, 36, 5, 2, "print", 1, 35, 36, 3, 33, 1, 6, 37, 3, 39, -29, 35, 35, 35, 35, 35, +35, 32, "-- for in loop with tuples and keys --", 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 44, 3, 36, 0, 36, 1, 2, +"keys", 1, 36, 1, 2, "values", 1, 33, 1, 36, 3, 2, "length", 1, 31, 31, 36, 5, 36, 4, 16, 40, 31, 36, 2, 36, 4, 45, 37, +6, 36, 3, 36, 4, 45, 37, 7, 36, 6, 36, 7, 2, "print", 2, 35, 36, 4, 33, 1, 6, 37, 4, 39, -38, 35, 35, 35, 35, 35, 35, +35, 35, 32, "-- for in loop with dicts --", 2, "print", 1, 35, 32, "first", 32, "v1", 32, "second", 32, "v2", 32, +"third", 32, "v3", 42, 3, 36, 0, 36, 1, 2, "values", 1, 33, 1, 36, 2, 2, "length", 1, 31, 36, 4, 36, 3, 16, 40, 22, 36, +2, 36, 3, 45, 37, 5, 36, 5, 2, "print", 1, 35, 36, 3, 33, 1, 6, 37, 3, 39, -29, 35, 35, 35, 35, 35, 35, 32, +"-- for in loop with dicts and keys --", 2, "print", 1, 35, 32, "first", 32, "v1", 32, "second", 32, "v2", 32, "third", +32, "v3", 42, 3, 36, 0, 36, 1, 2, "keys", 1, 36, 1, 2, "values", 1, 33, 1, 36, 3, 2, "length", 1, 31, 31, 36, 5, 36, 4, +16, 40, 31, 36, 2, 36, 4, 45, 37, 6, 36, 3, 36, 4, 45, 37, 7, 36, 6, 36, 7, 2, "print", 2, 
35, 36, 4, 33, 1, 6, 37, 4, +39, -38, 35, 35, 35, 35, 35, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/loops.stdout b/hogvm/__tests__/__snapshots__/loops.stdout index 270cb30b7ea47..0f4ad9c7493c5 100644 --- a/hogvm/__tests__/__snapshots__/loops.stdout +++ b/hogvm/__tests__/__snapshots__/loops.stdout @@ -7,7 +7,6 @@ 0 1 2 -null -- test emptier for loop -- woo woo diff --git a/hogvm/__tests__/__snapshots__/mandelbrot.hoge b/hogvm/__tests__/__snapshots__/mandelbrot.hoge index 211995efe2bc9..51895c2281458 100644 --- a/hogvm/__tests__/__snapshots__/mandelbrot.hoge +++ b/hogvm/__tests__/__snapshots__/mandelbrot.hoge @@ -1,8 +1,8 @@ -["_h", 41, "mandelbrot", 3, 93, 34, 0.0, 34, 0.0, 33, 0, 36, 0, 36, 5, 15, 33, 4, 36, 4, 36, 4, 8, 36, 3, 36, 3, 8, 6, -16, 3, 2, 40, 44, 36, 2, 36, 4, 36, 4, 8, 36, 3, 36, 3, 8, 7, 6, 36, 1, 36, 4, 36, 3, 33, 2, 8, 8, 6, 36, 6, 37, 3, 36, -7, 37, 4, 33, 1, 36, 5, 6, 37, 5, 35, 35, 39, -67, 36, 0, 36, 5, 11, 40, 5, 32, " ", 38, 39, 3, 32, "#", 38, 31, 38, 35, -35, 35, 41, "main", 0, 119, 33, 80, 33, 24, 34, -2.0, 34, 1.0, 34, -1.0, 34, 1.0, 33, 30, 33, 0, 36, 1, 36, 7, 15, 40, -86, 32, "", 33, 0, 36, 0, 36, 9, 15, 40, 58, 36, 2, 36, 2, 36, 3, 7, 36, 0, 36, 9, 9, 8, 6, 36, 4, 36, 4, 36, 5, 7, 36, -1, 36, 7, 9, 8, 6, 36, 6, 36, 11, 36, 10, 2, "mandelbrot", 3, 36, 12, 36, 8, 2, "concat", 2, 37, 8, 33, 1, 36, 9, 6, 37, -9, 35, 35, 35, 39, -65, 36, 8, 2, "print", 1, 35, 33, 1, 36, 7, 6, 37, 7, 35, 35, 39, -93, 31, 38, 35, 35, 35, 35, 35, -35, 35, 35, 2, "main", 0, 35] +["_H", 1, 41, "mandelbrot", 3, 93, 34, 0.0, 34, 0.0, 33, 0, 33, 4, 36, 4, 36, 4, 8, 36, 3, 36, 3, 8, 6, 16, 36, 2, 36, +5, 15, 3, 2, 40, 44, 36, 0, 36, 4, 36, 4, 8, 36, 3, 36, 3, 8, 7, 6, 36, 1, 36, 4, 36, 3, 33, 2, 8, 8, 6, 36, 6, 37, 3, +36, 7, 37, 4, 33, 1, 36, 5, 6, 37, 5, 35, 35, 39, -67, 36, 2, 36, 5, 11, 40, 5, 32, " ", 38, 39, 3, 32, "#", 38, 31, 38, +35, 35, 35, 41, "main", 0, 119, 33, 80, 33, 24, 34, -2.0, 34, 1.0, 34, -1.0, 34, 1.0, 33, 30, 33, 0, 36, 1, 36, 7, 15, +40, 86, 32, "", 33, 0, 36, 0, 36, 9, 15, 40, 58, 36, 2, 36, 2, 36, 3, 7, 36, 0, 36, 9, 9, 8, 6, 36, 4, 36, 4, 36, 5, 7, +36, 1, 36, 7, 9, 8, 6, 36, 10, 36, 11, 36, 6, 2, "mandelbrot", 3, 36, 8, 36, 12, 2, "concat", 2, 37, 8, 33, 1, 36, 9, 6, +37, 9, 35, 35, 35, 39, -65, 36, 8, 2, "print", 1, 35, 33, 1, 36, 7, 6, 37, 7, 35, 35, 39, -93, 31, 38, 35, 35, 35, 35, +35, 35, 35, 35, 2, "main", 0, 35] diff --git a/hogvm/__tests__/__snapshots__/operations.hoge b/hogvm/__tests__/__snapshots__/operations.hoge index 82156f1818f45..3abc0604f4040 100644 --- a/hogvm/__tests__/__snapshots__/operations.hoge +++ b/hogvm/__tests__/__snapshots__/operations.hoge @@ -1,26 +1,23 @@ -["_h", 41, "test", 1, 11, 36, 0, 2, "jsonStringify", 1, 2, "print", 1, 35, 31, 38, 32, +["_H", 1, 41, "test", 1, 11, 36, 0, 2, "jsonStringify", 1, 2, "print", 1, 35, 31, 38, 32, "-- test the most common expressions --", 2, "print", 1, 35, 33, 2, 33, 1, 6, 2, "test", 1, 35, 33, 2, 33, 1, 7, 2, "test", 1, 35, 33, 2, 33, 3, 8, 2, "test", 1, 35, 33, 2, 33, 3, 9, 2, "test", 1, 35, 33, 2, 33, 3, 10, 2, "test", 1, 35, -33, 2, 33, 1, 3, 2, 2, "test", 1, 35, 33, 0, 33, 1, 4, 2, 2, "test", 1, 35, 33, 0, 33, 1, 3, 2, 2, "test", 1, 35, 33, 2, -33, 1, 33, 0, 3, 2, 33, 1, 4, 3, 2, "test", 1, 35, 33, 1, 33, 0, 33, 1, 3, 3, 2, "test", 1, 35, 33, 2, 33, 1, 4, 2, 33, -2, 33, 1, 4, 2, 3, 2, 2, "test", 1, 35, 29, 2, "test", 1, 35, 29, 5, 2, "test", 1, 35, 30, 2, "test", 1, 35, 31, 2, +33, 1, 33, 2, 3, 2, 2, "test", 1, 35, 33, 1, 33, 0, 4, 2, 2, "test", 1, 35, 33, 1, 33, 0, 3, 2, 2, 
"test", 1, 35, 33, 1, +33, 0, 33, 1, 3, 2, 33, 2, 4, 3, 2, "test", 1, 35, 33, 1, 33, 0, 33, 1, 3, 3, 2, "test", 1, 35, 33, 1, 33, 2, 4, 2, 33, +1, 33, 2, 4, 2, 3, 2, 2, "test", 1, 35, 29, 2, "test", 1, 35, 29, 5, 2, "test", 1, 35, 30, 2, "test", 1, 35, 31, 2, "test", 1, 35, 34, 3.14, 2, "test", 1, 35, 33, 2, 33, 1, 11, 2, "test", 1, 35, 33, 2, 33, 1, 11, 2, "test", 1, 35, 33, 2, 33, 1, 12, 2, "test", 1, 35, 33, 2, 33, 1, 15, 2, "test", 1, 35, 33, 2, 33, 1, 16, 2, "test", 1, 35, 33, 2, 33, 1, 13, 2, "test", 1, 35, 33, 2, 33, 1, 14, 2, "test", 1, 35, 32, "b", 32, "a", 17, 2, "test", 1, 35, 32, "%a%", 32, "baa", 17, 2, "test", 1, 35, 32, "%x%", 32, "baa", 17, 2, "test", 1, 35, 32, "%A%", 32, "baa", 18, 2, "test", 1, 35, 32, "%C%", 32, "baa", 18, 2, "test", 1, 35, 32, "b", 32, "a", 18, 2, "test", 1, 35, 32, "b", 32, "a", 19, 2, "test", 1, 35, 32, "b", 32, "a", 20, 2, "test", 1, 35, 32, "car", 32, "a", 21, 2, "test", 1, 35, 32, "foo", 32, "a", 21, 2, "test", 1, 35, -32, "car", 32, "a", 22, 2, "test", 1, 35, 32, "bla", 32, "properties", 1, 2, 2, "test", 1, 35, 32, "foo", 32, -"properties", 1, 2, 2, "test", 1, 35, 32, "foo", 32, "properties", 1, 2, 47, 2, 35, 30, 2, "test", 1, 35, 32, -"nullValue", 32, "properties", 1, 2, 47, 2, 35, 30, 2, "test", 1, 35, 32, "another", 32, "arg", 2, "concat", 2, 2, -"test", 1, 35, 31, 33, 1, 2, "concat", 2, 2, "test", 1, 35, 30, 29, 2, "concat", 2, 2, "test", 1, 35, 32, "e.*", 32, -"test", 2, "match", 2, 2, "test", 1, 35, 32, "^e.*", 32, "test", 2, "match", 2, 2, "test", 1, 35, 32, "x.*", 32, "test", -2, "match", 2, 2, "test", 1, 35, 32, "e.*", 32, "test", 23, 2, "test", 1, 35, 32, "e.*", 32, "test", 24, 2, "test", 1, -35, 32, "^e.*", 32, "test", 23, 2, "test", 1, 35, 32, "^e.*", 32, "test", 24, 2, "test", 1, 35, 32, "x.*", 32, "test", -23, 2, "test", 1, 35, 32, "x.*", 32, "test", 24, 2, "test", 1, 35, 32, "EST", 32, "test", 25, 2, "test", 1, 35, 32, -"EST", 32, "test", 25, 2, "test", 1, 35, 32, "EST", 32, "test", 26, 2, "test", 1, 35, 33, 1, 2, "toString", 1, 2, -"test", 1, 35, 34, 1.5, 2, "toString", 1, 2, "test", 1, 35, 29, 2, "toString", 1, 2, "test", 1, 35, 31, 2, "toString", -1, 2, "test", 1, 35, 32, "string", 2, "toString", 1, 2, "test", 1, 35, 32, "1", 2, "toInt", 1, 2, "test", 1, 35, 32, -"bla", 2, "toInt", 1, 2, "test", 1, 35, 32, "1.2", 2, "toFloat", 1, 2, "test", 1, 35, 32, "bla", 2, "toFloat", 1, 2, -"test", 1, 35, 32, "asd", 2, "toUUID", 1, 2, "test", 1, 35, 31, 33, 1, 11, 2, "test", 1, 35, 31, 33, 1, 12, 2, "test", -1, 35] +32, "car", 32, "a", 22, 2, "test", 1, 35, 32, "arg", 32, "another", 2, "concat", 2, 2, "test", 1, 35, 33, 1, 31, 2, +"concat", 2, 2, "test", 1, 35, 29, 30, 2, "concat", 2, 2, "test", 1, 35, 32, "test", 32, "e.*", 2, "match", 2, 2, +"test", 1, 35, 32, "test", 32, "^e.*", 2, "match", 2, 2, "test", 1, 35, 32, "test", 32, "x.*", 2, "match", 2, 2, "test", +1, 35, 32, "e.*", 32, "test", 23, 2, "test", 1, 35, 32, "e.*", 32, "test", 24, 2, "test", 1, 35, 32, "^e.*", 32, "test", +23, 2, "test", 1, 35, 32, "^e.*", 32, "test", 24, 2, "test", 1, 35, 32, "x.*", 32, "test", 23, 2, "test", 1, 35, 32, +"x.*", 32, "test", 24, 2, "test", 1, 35, 32, "EST", 32, "test", 25, 2, "test", 1, 35, 32, "EST", 32, "test", 25, 2, +"test", 1, 35, 32, "EST", 32, "test", 26, 2, "test", 1, 35, 33, 1, 2, "toString", 1, 2, "test", 1, 35, 34, 1.5, 2, +"toString", 1, 2, "test", 1, 35, 29, 2, "toString", 1, 2, "test", 1, 35, 31, 2, "toString", 1, 2, "test", 1, 35, 32, +"string", 2, "toString", 1, 2, "test", 1, 35, 32, "1", 2, "toInt", 1, 2, "test", 1, 35, 32, "bla", 
2, "toInt", 1, 2, +"test", 1, 35, 32, "1.2", 2, "toFloat", 1, 2, "test", 1, 35, 32, "bla", 2, "toFloat", 1, 2, "test", 1, 35, 32, "asd", 2, +"toUUID", 1, 2, "test", 1, 35, 31, 33, 1, 11, 2, "test", 1, 35, 31, 33, 1, 12, 2, "test", 1, 35] diff --git a/hogvm/__tests__/__snapshots__/operations.stdout b/hogvm/__tests__/__snapshots__/operations.stdout index 26b21bc43634d..5017b0e3005c7 100644 --- a/hogvm/__tests__/__snapshots__/operations.stdout +++ b/hogvm/__tests__/__snapshots__/operations.stdout @@ -33,10 +33,6 @@ true true false false -null -null -false -false "arganother" "1" "truefalse" diff --git a/hogvm/__tests__/__snapshots__/printLoops.hoge b/hogvm/__tests__/__snapshots__/printLoops.hoge index a53576adf8f3e..db0bc1d43e494 100644 --- a/hogvm/__tests__/__snapshots__/printLoops.hoge +++ b/hogvm/__tests__/__snapshots__/printLoops.hoge @@ -1,4 +1,4 @@ -["_h", 32, "key", 32, "value", 32, "key2", 32, "value2", 42, 2, 32, "na", 33, 0, 33, 100, 36, 2, 15, 40, 45, 32, "na", -36, 1, 2, "concat", 2, 37, 1, 36, 0, 36, 2, 32, "key_", 2, "concat", 2, 32, "wasted", 32, " batman!", 36, 1, 32, -"memory: ", 2, "concat", 3, 32, "something", 36, 0, 42, 2, 46, 33, 1, 36, 2, 6, 37, 2, 39, -52, 35, 36, 0, 2, "print", +["_H", 1, 32, "key", 32, "value", 32, "key2", 32, "value2", 42, 2, 32, "na", 33, 0, 33, 100, 36, 2, 15, 40, 45, 36, 1, +32, "na", 2, "concat", 2, 37, 1, 36, 0, 32, "key_", 36, 2, 2, "concat", 2, 32, "wasted", 32, "memory: ", 36, 1, 32, +" batman!", 2, "concat", 3, 32, "something", 36, 0, 42, 2, 46, 33, 1, 36, 2, 6, 37, 2, 39, -52, 35, 36, 0, 2, "print", 1, 35, 36, 0, 2, "jsonStringify", 1, 36, 2, 2, "jsonParse", 1, 2, "print", 1, 35, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/printLoops2.hoge b/hogvm/__tests__/__snapshots__/printLoops2.hoge index 487604661c03e..071363c073c55 100644 --- a/hogvm/__tests__/__snapshots__/printLoops2.hoge +++ b/hogvm/__tests__/__snapshots__/printLoops2.hoge @@ -1,4 +1,4 @@ -["_h", 32, "key", 32, "value", 32, "key2", 32, "value2", 42, 2, 32, "key", 32, "value", 32, "key2", 32, "value2", 42, 2, -33, 0, 33, 30, 36, 2, 15, 40, 25, 36, 0, 36, 2, 32, "key_", 2, "concat", 2, 32, "something", 36, 1, 42, 1, 46, 33, 1, +["_H", 1, 32, "key", 32, "value", 32, "key2", 32, "value2", 42, 2, 32, "key", 32, "value", 32, "key2", 32, "value2", 42, +2, 33, 0, 33, 30, 36, 2, 15, 40, 25, 36, 0, 32, "key_", 36, 2, 2, "concat", 2, 32, "something", 36, 1, 42, 1, 46, 33, 1, 36, 2, 6, 37, 2, 39, -32, 35, 36, 0, 2, "print", 1, 35, 36, 0, 2, "jsonStringify", 1, 2, "jsonParse", 1, 2, "print", 1, 35, 35, 35] diff --git a/hogvm/__tests__/__snapshots__/properties.hoge b/hogvm/__tests__/__snapshots__/properties.hoge index 39ec0648c4063..4ce7c2821449d 100644 --- a/hogvm/__tests__/__snapshots__/properties.hoge +++ b/hogvm/__tests__/__snapshots__/properties.hoge @@ -1,4 +1,4 @@ -["_h", 33, 1, 33, 2, 32, "d", 33, 1, 33, 3, 33, 42, 33, 6, 44, 4, 42, 1, 43, 3, 36, 0, 33, 3, 45, 32, "d", 45, 33, 2, +["_H", 1, 33, 1, 33, 2, 32, "d", 33, 1, 33, 3, 33, 42, 33, 6, 44, 4, 42, 1, 43, 3, 36, 0, 33, 3, 45, 32, "d", 45, 33, 2, 45, 2, "print", 1, 35, 35, 33, 1, 33, 2, 32, "d", 33, 1, 33, 3, 33, 42, 33, 6, 44, 4, 42, 1, 43, 3, 36, 0, 33, 3, 45, 32, "d", 45, 33, 3, 45, 2, "print", 1, 35, 35, 33, 1, 33, 2, 32, "d", 33, 1, 33, 3, 33, 42, 33, 6, 44, 4, 42, 1, 43, 3, 36, 0, 33, 3, 45, 32, "d", 45, 33, 4, 45, 2, "print", 1, 35, 35, 32, "d", 33, 1, 33, 3, 33, 42, 33, 6, 44, 4, 42, 1, 36, diff --git a/hogvm/__tests__/__snapshots__/stl.hoge b/hogvm/__tests__/__snapshots__/stl.hoge index 
d9e6ec9c41f51..3f842a7d4156f 100644 --- a/hogvm/__tests__/__snapshots__/stl.hoge +++ b/hogvm/__tests__/__snapshots__/stl.hoge @@ -1,26 +1,26 @@ -["_h", 32, "-- empty, notEmpty, length, lower, upper, reverse --", 2, "print", 1, 35, 32, "234", 2, "notEmpty", 1, 32, -"", 2, "empty", 1, 3, 2, 40, 9, 32, "123", 2, "length", 1, 2, "print", 1, 35, 32, "tdd4gh", 32, "Tdd4gh", 2, "lower", 1, -11, 40, 9, 32, "test", 2, "upper", 1, 2, "print", 1, 35, 32, "spinner", 2, "reverse", 1, 2, "print", 1, 35, 32, "", 2, -"print", 1, 35, 32, "-- encodeURLComponent, decodeURLComponent --", 2, "print", 1, 35, 32, "http://www.google.com", 2, -"encodeURLComponent", 1, 2, "print", 1, 35, 32, "tom & jerry", 2, "encodeURLComponent", 1, 2, "print", 1, 35, 32, -"http://www.google.com", 2, "encodeURLComponent", 1, 2, "decodeURLComponent", 1, 2, "print", 1, 35, 32, "tom & jerry", -2, "encodeURLComponent", 1, 2, "decodeURLComponent", 1, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, -"-- base64Encode, base64Decode --", 2, "print", 1, 35, 32, "http://www.google.com", 2, "base64Encode", 1, 2, "print", 1, -35, 32, "tom & jerry", 2, "base64Encode", 1, 2, "print", 1, 35, 32, "http://www.google.com", 2, "base64Encode", 1, 2, -"base64Decode", 1, 2, "print", 1, 35, 32, "tom & jerry", 2, "base64Encode", 1, 2, "base64Decode", 1, 2, "print", 1, 35, -32, "", 2, "print", 1, 35, 32, "-- empty --", 2, "print", 1, 35, 31, 2, "empty", 1, 2, "print", 1, 35, 33, 0, 2, -"empty", 1, 2, "print", 1, 35, 33, 1, 2, "empty", 1, 2, "print", 1, 35, 33, -1, 2, "empty", 1, 2, "print", 1, 35, 34, -0.0, 2, "empty", 1, 2, "print", 1, 35, 34, 0.01, 2, "empty", 1, 2, "print", 1, 35, 32, "", 2, "empty", 1, 2, "print", 1, -35, 32, "string", 2, "empty", 1, 2, "print", 1, 35, 32, "0", 2, "empty", 1, 2, "print", 1, 35, 43, 0, 2, "empty", 1, 2, -"print", 1, 35, 42, 0, 2, "empty", 1, 2, "print", 1, 35, 2, "tuple", 0, 2, "empty", 1, 2, "print", 1, 35, 33, 0, 2, -"tuple", 1, 2, "empty", 1, 2, "print", 1, 35, 33, 2, 33, 1, 2, "tuple", 2, 2, "empty", 1, 2, "print", 1, 35, 32, "", 2, -"print", 1, 35, 32, "-- notEmpty --", 2, "print", 1, 35, 31, 2, "notEmpty", 1, 2, "print", 1, 35, 33, 0, 2, "notEmpty", -1, 2, "print", 1, 35, 33, 1, 2, "notEmpty", 1, 2, "print", 1, 35, 33, -1, 2, "notEmpty", 1, 2, "print", 1, 35, 34, 0.0, -2, "notEmpty", 1, 2, "print", 1, 35, 34, 0.01, 2, "notEmpty", 1, 2, "print", 1, 35, 32, "", 2, "notEmpty", 1, 2, -"print", 1, 35, 32, "string", 2, "notEmpty", 1, 2, "print", 1, 35, 32, "0", 2, "notEmpty", 1, 2, "print", 1, 35, 43, 0, -2, "notEmpty", 1, 2, "print", 1, 35, 42, 0, 2, "notEmpty", 1, 2, "print", 1, 35, 2, "tuple", 0, 2, "notEmpty", 1, 2, -"print", 1, 35, 33, 0, 2, "tuple", 1, 2, "notEmpty", 1, 2, "print", 1, 35, 33, 2, 33, 1, 2, "tuple", 2, 2, "notEmpty", -1, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, "-- replaceAll, replaceOne --", 2, "print", 1, 35, 32, "L", 32, -"l", 32, "hello world", 2, "replaceAll", 3, 2, "print", 1, 35, 32, "L", 32, "l", 32, "hello world", 2, "replaceOne", 3, -2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, "-- generateUUIDv4 --", 2, "print", 1, 35, 2, "generateUUIDv4", 0, 2, -"length", 1, 2, "print", 1, 35] +["_H", 1, 32, "-- empty, notEmpty, length, lower, upper, reverse --", 2, "print", 1, 35, 32, "", 2, "empty", 1, 32, +"234", 2, "notEmpty", 1, 3, 2, 40, 9, 32, "123", 2, "length", 1, 2, "print", 1, 35, 32, "tdd4gh", 32, "Tdd4gh", 2, +"lower", 1, 11, 40, 9, 32, "test", 2, "upper", 1, 2, "print", 1, 35, 32, "spinner", 2, "reverse", 1, 2, "print", 1, 35, +32, "", 2, "print", 1, 35, 32, "-- 
encodeURLComponent, decodeURLComponent --", 2, "print", 1, 35, 32, +"http://www.google.com", 2, "encodeURLComponent", 1, 2, "print", 1, 35, 32, "tom & jerry", 2, "encodeURLComponent", 1, +2, "print", 1, 35, 32, "http://www.google.com", 2, "encodeURLComponent", 1, 2, "decodeURLComponent", 1, 2, "print", 1, +35, 32, "tom & jerry", 2, "encodeURLComponent", 1, 2, "decodeURLComponent", 1, 2, "print", 1, 35, 32, "", 2, "print", 1, +35, 32, "-- base64Encode, base64Decode --", 2, "print", 1, 35, 32, "http://www.google.com", 2, "base64Encode", 1, 2, +"print", 1, 35, 32, "tom & jerry", 2, "base64Encode", 1, 2, "print", 1, 35, 32, "http://www.google.com", 2, +"base64Encode", 1, 2, "base64Decode", 1, 2, "print", 1, 35, 32, "tom & jerry", 2, "base64Encode", 1, 2, "base64Decode", +1, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, "-- empty --", 2, "print", 1, 35, 31, 2, "empty", 1, 2, "print", 1, +35, 33, 0, 2, "empty", 1, 2, "print", 1, 35, 33, 1, 2, "empty", 1, 2, "print", 1, 35, 33, -1, 2, "empty", 1, 2, "print", +1, 35, 34, 0.0, 2, "empty", 1, 2, "print", 1, 35, 34, 0.01, 2, "empty", 1, 2, "print", 1, 35, 32, "", 2, "empty", 1, 2, +"print", 1, 35, 32, "string", 2, "empty", 1, 2, "print", 1, 35, 32, "0", 2, "empty", 1, 2, "print", 1, 35, 43, 0, 2, +"empty", 1, 2, "print", 1, 35, 42, 0, 2, "empty", 1, 2, "print", 1, 35, 2, "tuple", 0, 2, "empty", 1, 2, "print", 1, 35, +33, 0, 2, "tuple", 1, 2, "empty", 1, 2, "print", 1, 35, 33, 1, 33, 2, 2, "tuple", 2, 2, "empty", 1, 2, "print", 1, 35, +32, "", 2, "print", 1, 35, 32, "-- notEmpty --", 2, "print", 1, 35, 31, 2, "notEmpty", 1, 2, "print", 1, 35, 33, 0, 2, +"notEmpty", 1, 2, "print", 1, 35, 33, 1, 2, "notEmpty", 1, 2, "print", 1, 35, 33, -1, 2, "notEmpty", 1, 2, "print", 1, +35, 34, 0.0, 2, "notEmpty", 1, 2, "print", 1, 35, 34, 0.01, 2, "notEmpty", 1, 2, "print", 1, 35, 32, "", 2, "notEmpty", +1, 2, "print", 1, 35, 32, "string", 2, "notEmpty", 1, 2, "print", 1, 35, 32, "0", 2, "notEmpty", 1, 2, "print", 1, 35, +43, 0, 2, "notEmpty", 1, 2, "print", 1, 35, 42, 0, 2, "notEmpty", 1, 2, "print", 1, 35, 2, "tuple", 0, 2, "notEmpty", 1, +2, "print", 1, 35, 33, 0, 2, "tuple", 1, 2, "notEmpty", 1, 2, "print", 1, 35, 33, 1, 33, 2, 2, "tuple", 2, 2, +"notEmpty", 1, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, "-- replaceAll, replaceOne --", 2, "print", 1, 35, 32, +"hello world", 32, "l", 32, "L", 2, "replaceAll", 3, 2, "print", 1, 35, 32, "hello world", 32, "l", 32, "L", 2, +"replaceOne", 3, 2, "print", 1, 35, 32, "", 2, "print", 1, 35, 32, "-- generateUUIDv4 --", 2, "print", 1, 35, 2, +"generateUUIDv4", 0, 2, "length", 1, 2, "print", 1, 35] diff --git a/hogvm/__tests__/__snapshots__/strings.hoge b/hogvm/__tests__/__snapshots__/strings.hoge index 11abe250f3a09..0c3118d3a043e 100644 --- a/hogvm/__tests__/__snapshots__/strings.hoge +++ b/hogvm/__tests__/__snapshots__/strings.hoge @@ -1,11 +1,11 @@ -["_h", 32, " hello world ", 2, "trim", 1, 2, "print", 1, 35, 32, " hello world ", 2, "trimLeft", 1, 2, "print", 1, -35, 32, " hello world ", 2, "trimRight", 1, 2, "print", 1, 35, 32, "x", 32, "xxxx hello world xx", 2, "trim", 2, 2, -"print", 1, 35, 32, "x", 32, "xxxx hello world xx", 2, "trimLeft", 2, 2, "print", 1, 35, 32, "x", 32, -"xxxx hello world xx", 2, "trimRight", 2, 2, "print", 1, 35, 32, "hello world and more", 32, " ", 2, "splitByString", -2, 2, "print", 1, 35, 33, 1, 32, "hello world and more", 32, " ", 2, "splitByString", 3, 2, "print", 1, 35, 33, 2, 32, -"hello world and more", 32, " ", 2, "splitByString", 3, 2, "print", 1, 35, 33, 10, 32, "hello 
world and more", 32, " ", -2, "splitByString", 3, 2, "print", 1, 35, 32, "N", 32, "banana", 2, "like", 2, 2, "print", 1, 35, 32, "n", 32, "banana", -2, "like", 2, 2, "print", 1, 35, 32, "naan", 32, "banana", 2, "like", 2, 2, "print", 1, 35, 32, "N", 32, "banana", 2, -"ilike", 2, 2, "print", 1, 35, 32, "n", 32, "banana", 2, "ilike", 2, 2, "print", 1, 35, 32, "naan", 32, "banana", 2, -"ilike", 2, 2, "print", 1, 35, 32, "N", 32, "banana", 2, "notLike", 2, 2, "print", 1, 35, 32, "NO", 32, "banana", 2, -"notILike", 2, 2, "print", 1, 35] +["_H", 1, 32, " hello world ", 2, "trim", 1, 2, "print", 1, 35, 32, " hello world ", 2, "trimLeft", 1, 2, "print", +1, 35, 32, " hello world ", 2, "trimRight", 1, 2, "print", 1, 35, 32, "xxxx hello world xx", 32, "x", 2, "trim", 2, +2, "print", 1, 35, 32, "xxxx hello world xx", 32, "x", 2, "trimLeft", 2, 2, "print", 1, 35, 32, +"xxxx hello world xx", 32, "x", 2, "trimRight", 2, 2, "print", 1, 35, 32, " ", 32, "hello world and more", 2, +"splitByString", 2, 2, "print", 1, 35, 32, " ", 32, "hello world and more", 33, 1, 2, "splitByString", 3, 2, "print", 1, +35, 32, " ", 32, "hello world and more", 33, 2, 2, "splitByString", 3, 2, "print", 1, 35, 32, " ", 32, +"hello world and more", 33, 10, 2, "splitByString", 3, 2, "print", 1, 35, 32, "banana", 32, "N", 2, "like", 2, 2, +"print", 1, 35, 32, "banana", 32, "n", 2, "like", 2, 2, "print", 1, 35, 32, "banana", 32, "naan", 2, "like", 2, 2, +"print", 1, 35, 32, "banana", 32, "N", 2, "ilike", 2, 2, "print", 1, 35, 32, "banana", 32, "n", 2, "ilike", 2, 2, +"print", 1, 35, 32, "banana", 32, "naan", 2, "ilike", 2, 2, "print", 1, 35, 32, "banana", 32, "N", 2, "notLike", 2, 2, +"print", 1, 35, 32, "banana", 32, "NO", 2, "notILike", 2, 2, "print", 1, 35] diff --git a/hogvm/__tests__/__snapshots__/tuples.hoge b/hogvm/__tests__/__snapshots__/tuples.hoge index 027d8ad266374..eb8c97ec6bf03 100644 --- a/hogvm/__tests__/__snapshots__/tuples.hoge +++ b/hogvm/__tests__/__snapshots__/tuples.hoge @@ -1,5 +1,5 @@ -["_h", 2, "tuple", 0, 2, "print", 1, 35, 33, 1, 44, 1, 2, "print", 1, 35, 33, 1, 33, 2, 44, 2, 2, "print", 1, 35, 33, 1, -33, 2, 44, 2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 44, 3, 2, "print", 1, 35, 33, 1, 32, "2", 33, 3, 44, 3, 2, +["_H", 1, 2, "tuple", 0, 2, "print", 1, 35, 33, 1, 44, 1, 2, "print", 1, 35, 33, 1, 33, 2, 44, 2, 2, "print", 1, 35, 33, +1, 33, 2, 44, 2, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 44, 3, 2, "print", 1, 35, 33, 1, 32, "2", 33, 3, 44, 3, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 44, 2, 33, 4, 44, 3, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 33, 4, 44, 2, 44, 2, 33, 5, 44, 3, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 44, 3, 36, 0, 33, 2, 45, 2, "print", 1, 35, 36, 0, 33, 2, 48, 2, "print", 1, 35, 36, 0, 33, 8, 48, 2, "print", 1, 35, 33, 1, 33, 2, 33, 3, 33, 4, 44, 2, 44, 2, 33, 5, 44, 3, 33, 2, 45, diff --git a/hogvm/__tests__/__snapshots__/variables.hoge b/hogvm/__tests__/__snapshots__/variables.hoge index b3f91623a1b0d..91f17e1f63b04 100644 --- a/hogvm/__tests__/__snapshots__/variables.hoge +++ b/hogvm/__tests__/__snapshots__/variables.hoge @@ -1,3 +1,3 @@ -["_h", 32, "-- test variables --", 2, "print", 1, 35, 33, 2, 33, 1, 6, 36, 0, 2, "print", 1, 35, 33, 4, 36, 0, 6, 36, 1, -2, "print", 1, 35, 35, 35, 32, "-- test variable reassignment --", 2, "print", 1, 35, 33, 1, 33, 3, 36, 0, 6, 37, 0, 33, -2, 36, 0, 8, 37, 0, 36, 0, 2, "print", 1, 35, 35] +["_H", 1, 32, "-- test variables --", 2, "print", 1, 35, 33, 2, 33, 1, 6, 36, 0, 2, "print", 1, 35, 33, 4, 36, 0, 6, 36, +1, 2, "print", 1, 35, 35, 35, 32, 
"-- test variable reassignment --", 2, "print", 1, 35, 33, 1, 33, 3, 36, 0, 6, 37, 0, +33, 2, 36, 0, 8, 37, 0, 36, 0, 2, "print", 1, 35, 35] diff --git a/hogvm/__tests__/dicts.hog b/hogvm/__tests__/dicts.hog index ccd081190b0da..8523c26cb5f8d 100644 --- a/hogvm/__tests__/dicts.hog +++ b/hogvm/__tests__/dicts.hog @@ -2,7 +2,6 @@ print({}) print({'key': 'value'}) print({'key': 'value', 'other': 'thing'}) print({'key': {'otherKey': 'value'}}) -print({key: 'value'}) let key := 3 print({key: 'value'}) diff --git a/hogvm/__tests__/loops.hog b/hogvm/__tests__/loops.hog index 369baf58a2339..f43fe7fcc1c39 100644 --- a/hogvm/__tests__/loops.hog +++ b/hogvm/__tests__/loops.hog @@ -11,9 +11,8 @@ print('-- test while loop --') print('-- test for loop --') { for (let i := 0; i < 3; i := i + 1) { - print(i) -- prints 3 times + print(i) // prints 3 times } - print(i) -- global does not print } print('-- test emptier for loop --') diff --git a/hogvm/__tests__/mandelbrot.hog b/hogvm/__tests__/mandelbrot.hog index 6ac17b2989d57..144ddf701ab87 100644 --- a/hogvm/__tests__/mandelbrot.hog +++ b/hogvm/__tests__/mandelbrot.hog @@ -41,4 +41,4 @@ fn main() { } } -main() \ No newline at end of file +main() diff --git a/hogvm/__tests__/operations.hog b/hogvm/__tests__/operations.hog index 25eca892ab190..8445bf17cee67 100644 --- a/hogvm/__tests__/operations.hog +++ b/hogvm/__tests__/operations.hog @@ -3,68 +3,64 @@ fn test(val) { } print('-- test the most common expressions --') -test(1 + 2) -- 3 -test(1 - 2) -- -1 -test(3 * 2) -- 6 -test(3 / 2) -- 1.5 -test(3 % 2) -- 1 -test(1 and 2) -- true -test(1 or 0) -- true -test(1 and 0) -- false -test(1 or (0 and 1) or 2) -- true -test((1 and 0) and 1) -- false -test((1 or 2) and (1 or 2)) -- true -test(true) -- true -test(not true) -- false -test(false) -- false -test(null) -- null -test(3.14) -- 3.14 -test(1 = 2) -- false -test(1 == 2) -- false -test(1 != 2) -- true -test(1 < 2) -- true -test(1 <= 2) -- true -test(1 > 2) -- false -test(1 >= 2) -- false -test('a' like 'b') -- false -test('baa' like '%a%') -- true -test('baa' like '%x%') -- false -test('baa' ilike '%A%') -- true -test('baa' ilike '%C%') -- false -test('a' ilike 'b') -- false -test('a' not like 'b') -- true -test('a' not ilike 'b') -- true -test('a' in 'car') -- true -test('a' in 'foo') -- false -test('a' not in 'car') -- false -test(properties.bla) -- null -test(properties.foo) -- "bar" -test(ifNull(properties.foo, false)) -- "bar" -test(ifNull(properties.nullValue, false)) -- false -test(concat('arg', 'another')) -- 'arganother' -test(concat(1, NULL)) -- '1' -test(concat(true, false)) -- 'truefalse' -test(match('test', 'e.*')) -- true -test(match('test', '^e.*')) -- false -test(match('test', 'x.*')) -- false -test('test' =~ 'e.*') -- true -test('test' !~ 'e.*') -- false -test('test' =~ '^e.*') -- false -test('test' !~ '^e.*') -- true -test('test' =~ 'x.*') -- false -test('test' !~ 'x.*') -- true -test('test' ~* 'EST') -- true -test('test' =~* 'EST') -- true -test('test' !~* 'EST') -- false -test(toString(1)) -- '1' -test(toString(1.5)) -- '1.5' -test(toString(true)) -- 'true' -test(toString(null)) -- 'null' -test(toString('string')) -- 'string' -test(toInt('1')) -- 1 -test(toInt('bla')) -- null -test(toFloat('1.2')) -- 1.2 -test(toFloat('bla')) -- null -test(toUUID('asd')) -- 'asd' -test(1 == null) -- false -test(1 != null) -- true +test(1 + 2) // 3 +test(1 - 2) // -1 +test(3 * 2) // 6 +test(3 / 2) // 1.5 +test(3 % 2) // 1 +test(1 and 2) // true +test(1 or 0) // true +test(1 and 0) // false +test(1 or (0 and 
1) or 2) // true +test((1 and 0) and 1) // false +test((1 or 2) and (1 or 2)) // true +test(true) // true +test(not true) // false +test(false) // false +test(null) // null +test(3.14) // 3.14 +test(1 = 2) // false +test(1 == 2) // false +test(1 != 2) // true +test(1 < 2) // true +test(1 <= 2) // true +test(1 > 2) // false +test(1 >= 2) // false +test('a' like 'b') // false +test('baa' like '%a%') // true +test('baa' like '%x%') // false +test('baa' ilike '%A%') // true +test('baa' ilike '%C%') // false +test('a' ilike 'b') // false +test('a' not like 'b') // true +test('a' not ilike 'b') // true +test('a' in 'car') // true +test('a' in 'foo') // false +test('a' not in 'car') // false +test(concat('arg', 'another')) // 'arganother' +test(concat(1, NULL)) // '1' +test(concat(true, false)) // 'truefalse' +test(match('test', 'e.*')) // true +test(match('test', '^e.*')) // false +test(match('test', 'x.*')) // false +test('test' =~ 'e.*') // true +test('test' !~ 'e.*') // false +test('test' =~ '^e.*') // false +test('test' !~ '^e.*') // true +test('test' =~ 'x.*') // false +test('test' !~ 'x.*') // true +test('test' ~* 'EST') // true +test('test' =~* 'EST') // true +test('test' !~* 'EST') // false +test(toString(1)) // '1' +test(toString(1.5)) // '1.5' +test(toString(true)) // 'true' +test(toString(null)) // 'null' +test(toString('string')) // 'string' +test(toInt('1')) // 1 +test(toInt('bla')) // null +test(toFloat('1.2')) // 1.2 +test(toFloat('bla')) // null +test(toUUID('asd')) // 'asd' +test(1 == null) // false +test(1 != null) // true diff --git a/hogvm/python/debugger.py b/hogvm/python/debugger.py index 270c1a5233c01..10d4355e8c1c4 100644 --- a/hogvm/python/debugger.py +++ b/hogvm/python/debugger.py @@ -163,8 +163,8 @@ def print_symbol(symbol: Operation, ip: int, bytecode: list, stack: list, call_s ) case Operation.DECLARE_FN: return f"DECLARE_FN({bytecode[ip+1]}, args={bytecode[ip+2]}, ops={bytecode[ip+3]})" - case Operation.CALL: - return f"CALL({bytecode[ip+1]} {', '.join(str(stack[-i]) for i in range(bytecode[ip+2]))})" + case Operation.CALL_GLOBAL: + return f"CALL_GLOBAL({bytecode[ip+1]} {', '.join(str(stack[-(bytecode[ip+2] - i)]) for i in range(bytecode[ip+2]))})" case Operation.TRY: return f"TRY(+{bytecode[ip+1]})" case Operation.POP_TRY: @@ -177,8 +177,8 @@ def print_symbol(symbol: Operation, ip: int, bytecode: list, stack: list, call_s def color_bytecode(bytecode: list) -> list: - colored = ["op.START"] - ip = 1 + colored = ["op.START", f"version: {bytecode[1]}"] if bytecode[0] == "_H" else ["op.START"] + ip = len(colored) while ip < len(bytecode): symbol = bytecode[ip] match symbol: @@ -274,8 +274,8 @@ def color_bytecode(bytecode: list) -> list: add = ["op.JUMP_IF_STACK_NOT_NULL", f"offset: {'+' if bytecode[ip+1] >= 0 else ''}{bytecode[ip+1]}"] case Operation.DECLARE_FN: add = ["op.DECLARE_FN", f"name: {bytecode[ip+1]}", f"args: {bytecode[ip+2]}", f"ops: {bytecode[ip+3]}"] - case Operation.CALL: - add = ["op.CALL", f"name: {bytecode[ip+1]}", f"args: {bytecode[ip+2]}"] + case Operation.CALL_GLOBAL: + add = ["op.CALL_GLOBAL", f"name: {bytecode[ip+1]}", f"args: {bytecode[ip+2]}"] case Operation.TRY: add = ["op.TRY", f"catch: +{bytecode[ip+1]}"] case Operation.POP_TRY: diff --git a/hogvm/python/execute.py b/hogvm/python/execute.py index c137868d026e5..1f0a47e1bd1d9 100644 --- a/hogvm/python/execute.py +++ b/hogvm/python/execute.py @@ -7,7 +7,7 @@ from hogvm.python.debugger import debugger, color_bytecode from hogvm.python.objects import is_hog_error -from hogvm.python.operation 
import Operation, HOGQL_BYTECODE_IDENTIFIER +from hogvm.python.operation import Operation, HOGQL_BYTECODE_IDENTIFIER, HOGQL_BYTECODE_IDENTIFIER_V0 from hogvm.python.stl import STL from dataclasses import dataclass @@ -41,6 +41,9 @@ def execute_bytecode( team: Optional["Team"] = None, debug=False, ) -> BytecodeResult: + if len(bytecode) == 0 or (bytecode[0] not in (HOGQL_BYTECODE_IDENTIFIER, HOGQL_BYTECODE_IDENTIFIER_V0)): + raise HogVMException(f"Invalid bytecode. Must start with '{HOGQL_BYTECODE_IDENTIFIER}'") + version = bytecode[1] if len(bytecode) >= 2 and bytecode[0] == HOGQL_BYTECODE_IDENTIFIER else 0 result = None start_time = time.time() last_op = len(bytecode) - 1 @@ -51,13 +54,19 @@ def execute_bytecode( declared_functions: dict[str, tuple[int, int]] = {} mem_used = 0 max_mem_used = 0 - ip = -1 + ip = 1 if version > 0 else 0 ops = 0 stdout: list[str] = [] colored_bytecode = color_bytecode(bytecode) if debug else [] if isinstance(timeout, int): timeout = timedelta(seconds=timeout) + def stack_keep_first_elements(count: int): + nonlocal stack, mem_stack, mem_used + stack = stack[0:count] + mem_used -= sum(mem_stack[count:]) + mem_stack = mem_stack[0:count] + def next_token(): nonlocal ip ip += 1 @@ -82,10 +91,7 @@ def push_stack(value): if mem_used > MAX_MEMORY: raise HogVMException(f"Memory limit of {MAX_MEMORY} bytes exceeded. Tried to allocate {mem_used} bytes.") - if next_token() != HOGQL_BYTECODE_IDENTIFIER: - raise HogVMException(f"Invalid bytecode. Must start with '{HOGQL_BYTECODE_IDENTIFIER}'") - - if len(bytecode) == 1: + if len(bytecode) <= 2: return BytecodeResult(result=None, stdout=stdout, bytecode=bytecode) def check_timeout(): @@ -170,16 +176,17 @@ def check_timeout(): push_stack(not bool(re.search(re.compile(args[1], re.RegexFlag.IGNORECASE), args[0]))) case Operation.GET_GLOBAL: chain = [pop_stack() for _ in range(next_token())] - push_stack(deepcopy(get_nested_value(globals, chain))) + if globals and chain[0] in globals: + push_stack(deepcopy(get_nested_value(globals, chain))) + else: + raise HogVMException(f"Global variable not found: {chain[0]}") case Operation.POP: pop_stack() case Operation.RETURN: if call_stack: ip, stack_start, arg_len = call_stack.pop() response = pop_stack() - stack = stack[0:stack_start] - mem_used -= sum(mem_stack[stack_start:]) - mem_stack = mem_stack[0:stack_start] + stack_keep_first_elements(stack_start) push_stack(response) else: return BytecodeResult(result=pop_stack(), stdout=stdout, bytecode=bytecode) @@ -252,7 +259,7 @@ def check_timeout(): body_len = next_token() declared_functions[name] = (ip, arg_len) ip += body_len - case Operation.CALL: + case Operation.CALL_GLOBAL: check_timeout() name = next_token() if name in declared_functions: @@ -260,16 +267,17 @@ def check_timeout(): call_stack.append((ip + 1, len(stack) - arg_len, arg_len)) ip = func_ip else: - args = [pop_stack() for _ in range(next_token())] + arg_count = next_token() + args = [pop_stack() for _ in range(arg_count)] + if version > 0: + args = list(reversed(args)) if functions is not None and name in functions: push_stack(functions[name](*args)) - continue - - if name not in STL: + elif name in STL: + push_stack(STL[name](args, team, stdout, timeout.total_seconds())) + else: raise HogVMException(f"Unsupported function call: {name}") - - push_stack(STL[name](args, team, stdout, timeout.total_seconds())) case Operation.TRY: throw_stack.append((len(call_stack), len(stack), ip + next_token())) case Operation.POP_TRY: @@ -283,9 +291,7 @@ def check_timeout(): raise 
HogVMException("Can not throw: value is not of type Error") if throw_stack: call_stack_len, stack_len, catch_ip = throw_stack.pop() - stack = stack[0:stack_len] - mem_used -= sum(mem_stack[stack_len:]) - mem_stack = mem_stack[0:stack_len] + stack_keep_first_elements(stack_len) call_stack = call_stack[0:call_stack_len] push_stack(exception) ip = catch_ip diff --git a/hogvm/python/operation.py b/hogvm/python/operation.py index 319a450fc37b1..0feb79ebfac2e 100644 --- a/hogvm/python/operation.py +++ b/hogvm/python/operation.py @@ -1,12 +1,18 @@ from enum import Enum -HOGQL_BYTECODE_IDENTIFIER = "_h" +HOGQL_BYTECODE_IDENTIFIER_V0 = "_h" +HOGQL_BYTECODE_IDENTIFIER = "_H" HOGQL_BYTECODE_FUNCTION = "_f" +# Version history: +# 0 - initial version +# 1 - added version numbers; reversed order of function args in stack +HOGQL_BYTECODE_VERSION = 1 + class Operation(int, Enum): GET_GLOBAL = 1 - CALL = 2 + CALL_GLOBAL = 2 AND = 3 OR = 4 NOT = 5 diff --git a/hogvm/python/test/test_execute.py b/hogvm/python/test/test_execute.py index b5c1a3d8ee2ae..7289a1b585eff 100644 --- a/hogvm/python/test/test_execute.py +++ b/hogvm/python/test/test_execute.py @@ -4,7 +4,7 @@ from hogvm.python.execute import execute_bytecode, get_nested_value -from hogvm.python.operation import Operation as op, HOGQL_BYTECODE_IDENTIFIER as _H +from hogvm.python.operation import Operation as op, HOGQL_BYTECODE_IDENTIFIER as _H, HOGQL_BYTECODE_VERSION as VERSION from hogvm.python.utils import UncaughtHogVMException from posthog.hogql.bytecode import create_bytecode from posthog.hogql.parser import parse_expr, parse_program @@ -115,21 +115,21 @@ def test_nested_value(self): def test_errors(self): try: - execute_bytecode([_H, op.TRUE, op.CALL, "notAFunction", 1], {}) + execute_bytecode([_H, VERSION, op.TRUE, op.CALL_GLOBAL, "notAFunction", 1], {}) except Exception as e: assert str(e) == "Unsupported function call: notAFunction" else: raise AssertionError("Expected Exception not raised") try: - execute_bytecode([_H, op.CALL, "notAFunction", 1], {}) + execute_bytecode([_H, VERSION, op.CALL_GLOBAL, "notAFunction", 1], {}) except Exception as e: assert str(e) == "Stack underflow" else: raise AssertionError("Expected Exception not raised") try: - execute_bytecode([_H, op.TRUE, op.TRUE, op.NOT], {}) + execute_bytecode([_H, VERSION, op.TRUE, op.TRUE, op.NOT], {}) except Exception as e: assert str(e) == "Invalid bytecode. 
More than one value left on stack" else: @@ -182,10 +182,6 @@ def test_memory_limits_1(self): raise AssertionError("Expected Exception not raised") def test_memory_limits_2(self): - # let string := 'banana' - # for (let i := 0; i < 100; i := i + 1) { - # string := string || string - # } bytecode = [ "_h", 32, @@ -281,27 +277,40 @@ def stringify(*args): return "zero" functions = {"stringify": stringify} - assert execute_bytecode([_H, op.INTEGER, 1, op.CALL, "stringify", 1, op.RETURN], {}, functions).result == "one" - assert execute_bytecode([_H, op.INTEGER, 2, op.CALL, "stringify", 1, op.RETURN], {}, functions).result == "two" assert ( - execute_bytecode([_H, op.STRING, "2", op.CALL, "stringify", 1, op.RETURN], {}, functions).result == "zero" + execute_bytecode( + [_H, VERSION, op.INTEGER, 1, op.CALL_GLOBAL, "stringify", 1, op.RETURN], {}, functions + ).result + == "one" + ) + assert ( + execute_bytecode( + [_H, VERSION, op.INTEGER, 2, op.CALL_GLOBAL, "stringify", 1, op.RETURN], {}, functions + ).result + == "two" + ) + assert ( + execute_bytecode( + [_H, VERSION, op.STRING, "2", op.CALL_GLOBAL, "stringify", 1, op.RETURN], {}, functions + ).result + == "zero" + ) + + def test_version_0_and_1(self): + # version 0 of HogQL bytecode had arguments in a different order + assert ( + execute_bytecode(["_h", op.STRING, "1", op.STRING, "2", op.CALL_GLOBAL, "concat", 2, op.RETURN]).result + == "21" + ) + assert ( + execute_bytecode(["_H", 1, op.STRING, "1", op.STRING, "2", op.CALL_GLOBAL, "concat", 2, op.RETURN]).result + == "12" ) def test_bytecode_variable_assignment(self): program = parse_program("let a := 1 + 2; return a;") bytecode = create_bytecode(program) - assert bytecode == [ - _H, - op.INTEGER, - 2, - op.INTEGER, - 1, - op.PLUS, - op.GET_LOCAL, - 0, - op.RETURN, - op.POP, - ] + assert bytecode == ["_H", 1, op.INTEGER, 2, op.INTEGER, 1, op.PLUS, op.GET_LOCAL, 0, op.RETURN, op.POP] assert self._run_program("let a := 1 + 2; return a;") == 3 assert ( @@ -319,7 +328,8 @@ def test_bytecode_if_else(self): program = parse_program("if (true) return 1; else return 2;") bytecode = create_bytecode(program) assert bytecode == [ - _H, + "_H", + 1, op.TRUE, op.JUMP_IF_FALSE, 5, @@ -371,7 +381,8 @@ def test_bytecode_while(self): program = parse_program("while (true) 1 + 1;") bytecode = create_bytecode(program) assert bytecode == [ - _H, + "_H", + 1, op.TRUE, op.JUMP_IF_FALSE, 8, @@ -388,10 +399,11 @@ def test_bytecode_while(self): program = parse_program("while (toString('a')) { 1 + 1; } return 3;") bytecode = create_bytecode(program) assert bytecode == [ - _H, + "_H", + 1, op.STRING, "a", - op.CALL, + op.CALL_GLOBAL, "toString", 1, op.JUMP_IF_FALSE, @@ -466,7 +478,7 @@ def test_bytecode_for(self): print(i) -- prints 3 times j := j + 2 } - print(i) -- global does not print + // print(i) -- global does not print return j """ ) @@ -484,22 +496,23 @@ def test_bytecode_functions(self): ) bytecode = create_bytecode(program) assert bytecode == [ - _H, + "_H", + 1, op.DECLARE_FN, "add", 2, 6, op.GET_LOCAL, - 0, - op.GET_LOCAL, 1, + op.GET_LOCAL, + 0, op.PLUS, op.RETURN, op.INTEGER, - 4, - op.INTEGER, 3, - op.CALL, + op.INTEGER, + 4, + op.CALL_GLOBAL, "add", 2, op.RETURN, @@ -620,7 +633,12 @@ def test_bytecode_dicts(self): assert self._run_program("return {'key': 'value'};") == {"key": "value"} assert self._run_program("return {'key': 'value', 'other': 'thing'};") == {"key": "value", "other": "thing"} assert self._run_program("return {'key': {'otherKey': 'value'}};") == {"key": {"otherKey": "value"}} - assert 
self._run_program("return {key: 'value'};") == {None: "value"} + try: + self._run_program("return {key: 'value'};") + except Exception as e: + assert str(e) == "Global variable not found: key" + else: + raise AssertionError("Expected Exception not raised") assert self._run_program("let key := 3; return {key: 'value'};") == {3: "value"} assert self._run_program("return {'key': 'value'}.key;") == "value" diff --git a/hogvm/typescript/package.json b/hogvm/typescript/package.json index aff5560bc6973..90291d4afa369 100644 --- a/hogvm/typescript/package.json +++ b/hogvm/typescript/package.json @@ -1,6 +1,6 @@ { "name": "@posthog/hogvm", - "version": "1.0.32", + "version": "1.0.36", "description": "PostHog Hog Virtual Machine", "types": "dist/index.d.ts", "main": "dist/index.js", diff --git a/hogvm/typescript/src/__tests__/execute.test.ts b/hogvm/typescript/src/__tests__/execute.test.ts index 435b7d873abfe..9efe09b2dd9b7 100644 --- a/hogvm/typescript/src/__tests__/execute.test.ts +++ b/hogvm/typescript/src/__tests__/execute.test.ts @@ -63,24 +63,24 @@ describe('hogvm execute', () => { expect(execSync(['_h', op.STRING, 'AL', op.STRING, 'kala', op.NOT_IREGEX], options)).toBe(false) expect(execSync(['_h', op.STRING, 'bla', op.STRING, 'properties', op.GET_GLOBAL, 2], options)).toBe(null) expect(execSync(['_h', op.STRING, 'foo', op.STRING, 'properties', op.GET_GLOBAL, 2], options)).toBe('bar') - expect(execSync(['_h', op.STRING, 'another', op.STRING, 'arg', op.CALL, 'concat', 2], options)).toBe( + expect(execSync(['_h', op.STRING, 'another', op.STRING, 'arg', op.CALL_GLOBAL, 'concat', 2], options)).toBe( 'arganother' ) - expect(execSync(['_h', op.NULL, op.INTEGER, 1, op.CALL, 'concat', 2], options)).toBe('1') - expect(execSync(['_h', op.FALSE, op.TRUE, op.CALL, 'concat', 2], options)).toBe('truefalse') - expect(execSync(['_h', op.STRING, 'e.*', op.STRING, 'test', op.CALL, 'match', 2], options)).toBe(true) - expect(execSync(['_h', op.STRING, '^e.*', op.STRING, 'test', op.CALL, 'match', 2], options)).toBe(false) - expect(execSync(['_h', op.STRING, 'x.*', op.STRING, 'test', op.CALL, 'match', 2], options)).toBe(false) - expect(execSync(['_h', op.INTEGER, 1, op.CALL, 'toString', 1], options)).toBe('1') - expect(execSync(['_h', op.FLOAT, 1.5, op.CALL, 'toString', 1], options)).toBe('1.5') - expect(execSync(['_h', op.TRUE, op.CALL, 'toString', 1], options)).toBe('true') - expect(execSync(['_h', op.NULL, op.CALL, 'toString', 1], options)).toBe('null') - expect(execSync(['_h', op.STRING, 'string', op.CALL, 'toString', 1], options)).toBe('string') - expect(execSync(['_h', op.STRING, '1', op.CALL, 'toInt', 1], options)).toBe(1) - expect(execSync(['_h', op.STRING, 'bla', op.CALL, 'toInt', 1], options)).toBe(null) - expect(execSync(['_h', op.STRING, '1.2', op.CALL, 'toFloat', 1], options)).toBe(1.2) - expect(execSync(['_h', op.STRING, 'bla', op.CALL, 'toFloat', 1], options)).toBe(null) - expect(execSync(['_h', op.STRING, 'asd', op.CALL, 'toUUID', 1], options)).toBe('asd') + expect(execSync(['_h', op.NULL, op.INTEGER, 1, op.CALL_GLOBAL, 'concat', 2], options)).toBe('1') + expect(execSync(['_h', op.FALSE, op.TRUE, op.CALL_GLOBAL, 'concat', 2], options)).toBe('truefalse') + expect(execSync(['_h', op.STRING, 'e.*', op.STRING, 'test', op.CALL_GLOBAL, 'match', 2], options)).toBe(true) + expect(execSync(['_h', op.STRING, '^e.*', op.STRING, 'test', op.CALL_GLOBAL, 'match', 2], options)).toBe(false) + expect(execSync(['_h', op.STRING, 'x.*', op.STRING, 'test', op.CALL_GLOBAL, 'match', 2], options)).toBe(false) + 
expect(execSync(['_h', op.INTEGER, 1, op.CALL_GLOBAL, 'toString', 1], options)).toBe('1') + expect(execSync(['_h', op.FLOAT, 1.5, op.CALL_GLOBAL, 'toString', 1], options)).toBe('1.5') + expect(execSync(['_h', op.TRUE, op.CALL_GLOBAL, 'toString', 1], options)).toBe('true') + expect(execSync(['_h', op.NULL, op.CALL_GLOBAL, 'toString', 1], options)).toBe('null') + expect(execSync(['_h', op.STRING, 'string', op.CALL_GLOBAL, 'toString', 1], options)).toBe('string') + expect(execSync(['_h', op.STRING, '1', op.CALL_GLOBAL, 'toInt', 1], options)).toBe(1) + expect(execSync(['_h', op.STRING, 'bla', op.CALL_GLOBAL, 'toInt', 1], options)).toBe(null) + expect(execSync(['_h', op.STRING, '1.2', op.CALL_GLOBAL, 'toFloat', 1], options)).toBe(1.2) + expect(execSync(['_h', op.STRING, 'bla', op.CALL_GLOBAL, 'toFloat', 1], options)).toBe(null) + expect(execSync(['_h', op.STRING, 'asd', op.CALL_GLOBAL, 'toUUID', 1], options)).toBe('asd') expect(execSync(['_h', op.NULL, op.INTEGER, 1, op.EQ], options)).toBe(false) expect(execSync(['_h', op.NULL, op.INTEGER, 1, op.NOT_EQ], options)).toBe(true) @@ -89,19 +89,34 @@ describe('hogvm execute', () => { test('error handling', async () => { const globals = { properties: { foo: 'bar' } } const options = { globals } - expect(() => execSync([], options)).toThrow("Invalid HogQL bytecode, must start with '_h'") - await expect(execAsync([], options)).rejects.toThrow("Invalid HogQL bytecode, must start with '_h'") + expect(() => execSync([], options)).toThrow("Invalid HogQL bytecode, must start with '_H'") + await expect(execAsync([], options)).rejects.toThrow("Invalid HogQL bytecode, must start with '_H'") + expect(() => execSync(['_h', op.INTEGER, 2, op.INTEGER, 1, 'InvalidOp'], options)).toThrow( 'Unexpected node while running bytecode: InvalidOp' ) expect(() => - execSync(['_h', op.STRING, 'another', op.STRING, 'arg', op.CALL, 'invalidFunc', 2], options) + execSync(['_h', op.STRING, 'another', op.STRING, 'arg', op.CALL_GLOBAL, 'invalidFunc', 2], options) ).toThrow('Unsupported function call: invalidFunc') expect(() => execSync(['_h', op.INTEGER], options)).toThrow('Unexpected end of bytecode') - expect(() => execSync(['_h', op.CALL, 'match', 1], options)).toThrow('Not enough arguments on the stack') + expect(() => execSync(['_h', op.CALL_GLOBAL, 'match', 1], options)).toThrow('Not enough arguments on the stack') expect(() => execSync(['_h', op.TRUE, op.TRUE, op.NOT], options)).toThrow( 'Invalid bytecode. More than one value left on stack' ) + + expect(() => execSync(['_H', 1, op.INTEGER, 2, op.INTEGER, 1, 'InvalidOp'], options)).toThrow( + 'Unexpected node while running bytecode: InvalidOp' + ) + expect(() => + execSync(['_H', 1, op.STRING, 'another', op.STRING, 'arg', op.CALL_GLOBAL, 'invalidFunc', 2], options) + ).toThrow('Unsupported function call: invalidFunc') + expect(() => execSync(['_H', 1, op.INTEGER], options)).toThrow('Unexpected end of bytecode') + expect(() => execSync(['_H', 1, op.CALL_GLOBAL, 'match', 1], options)).toThrow( + 'Not enough arguments on the stack' + ) + expect(() => execSync(['_H', 1, op.TRUE, op.TRUE, op.NOT], options)).toThrow( + 'Invalid bytecode. 
More than one value left on stack' + ) }) test('async limits', async () => { @@ -288,9 +303,14 @@ describe('hogvm execute', () => { return 'zero' }, } - expect(execSync(['_h', op.INTEGER, 1, op.CALL, 'stringify', 1], { functions })).toBe('one') - expect(execSync(['_h', op.INTEGER, 2, op.CALL, 'stringify', 1], { functions })).toBe('two') - expect(execSync(['_h', op.STRING, '2', op.CALL, 'stringify', 1], { functions })).toBe('zero') + expect(execSync(['_h', op.INTEGER, 1, op.CALL_GLOBAL, 'stringify', 1], { functions })).toBe('one') + expect(execSync(['_h', op.INTEGER, 2, op.CALL_GLOBAL, 'stringify', 1], { functions })).toBe('two') + expect(execSync(['_h', op.STRING, '2', op.CALL_GLOBAL, 'stringify', 1], { functions })).toBe('zero') + }) + + test('version 0 and 1', async () => { + expect(execSync(['_h', op.STRING, '1', op.STRING, '2', op.CALL_GLOBAL, 'concat', 2, op.RETURN])).toBe('21') + expect(execSync(['_H', 1, op.STRING, '1', op.STRING, '2', op.CALL_GLOBAL, 'concat', 2, op.RETURN])).toBe('12') }) test('should execute user-defined stringify async function correctly', async () => { @@ -304,9 +324,9 @@ describe('hogvm execute', () => { return Promise.resolve('zero') }, } - expect(await execAsync(['_h', op.INTEGER, 1, op.CALL, 'stringify', 1], { asyncFunctions })).toBe('one') - expect(await execAsync(['_h', op.INTEGER, 2, op.CALL, 'stringify', 1], { asyncFunctions })).toBe('two') - expect(await execAsync(['_h', op.STRING, '2', op.CALL, 'stringify', 1], { asyncFunctions })).toBe('zero') + expect(await execAsync(['_h', op.INTEGER, 1, op.CALL_GLOBAL, 'stringify', 1], { asyncFunctions })).toBe('one') + expect(await execAsync(['_h', op.INTEGER, 2, op.CALL_GLOBAL, 'stringify', 1], { asyncFunctions })).toBe('two') + expect(await execAsync(['_h', op.STRING, '2', op.CALL_GLOBAL, 'stringify', 1], { asyncFunctions })).toBe('zero') }) test('bytecode variable assignment', async () => { @@ -393,7 +413,7 @@ describe('hogvm execute', () => { 1, op.INTEGER, 2, - op.CALL, + op.CALL_GLOBAL, 'add', 2, op.INTEGER, @@ -402,12 +422,12 @@ describe('hogvm execute', () => { 4, op.INTEGER, 3, - op.CALL, + op.CALL_GLOBAL, 'add', 2, op.PLUS, op.PLUS, - op.CALL, + op.CALL_GLOBAL, 'divide', 2, op.RETURN, @@ -439,7 +459,7 @@ describe('hogvm execute', () => { op.GET_LOCAL, 0, op.MINUS, - op.CALL, + op.CALL_GLOBAL, 'fibonacci', 1, op.INTEGER, @@ -447,14 +467,14 @@ describe('hogvm execute', () => { op.GET_LOCAL, 0, op.MINUS, - op.CALL, + op.CALL_GLOBAL, 'fibonacci', 1, op.PLUS, op.RETURN, op.INTEGER, 6, - op.CALL, + op.CALL_GLOBAL, 'fibonacci', 1, op.RETURN, @@ -623,10 +643,10 @@ describe('hogvm execute', () => { .result ).toEqual(map({ key: map({ otherKey: 'value' }) })) - // return {key: 'value'}; + // // return {key: 'value'}; expect( - exec(['_h', op.STRING, 'key', op.GET_GLOBAL, 1, op.STRING, 'value', op.DICT, 1, op.RETURN]).result - ).toEqual(new Map([[null, 'value']])) + () => exec(['_h', op.STRING, 'key', op.GET_GLOBAL, 1, op.STRING, 'value', op.DICT, 1, op.RETURN]).result + ).toThrow('Global variable not found: key') // var key := 3; return {key: 'value'}; expect( @@ -1729,11 +1749,11 @@ describe('hogvm execute', () => { op.DICT, 2, ] - expect(execSync(['_h', op.STRING, '[1,2,3]', op.CALL, 'jsonParse', 1])).toEqual([1, 2, 3]) - expect(execSync(['_h', ...dict, op.CALL, 'jsonStringify', 1])).toEqual( + expect(execSync(['_h', op.STRING, '[1,2,3]', op.CALL_GLOBAL, 'jsonParse', 1])).toEqual([1, 2, 3]) + expect(execSync(['_h', ...dict, op.CALL_GLOBAL, 'jsonStringify', 1])).toEqual( 
'{"event":"$pageview","properties":{"$browser":"Chrome","$os":"Windows"}}' ) - expect(execSync(['_h', op.INTEGER, 2, ...dict, op.CALL, 'jsonStringify', 2])).toEqual( + expect(execSync(['_h', op.INTEGER, 2, ...dict, op.CALL_GLOBAL, 'jsonStringify', 2])).toEqual( JSON.stringify({ event: '$pageview', properties: { $browser: 'Chrome', $os: 'Windows' } }, null, 2) ) }) @@ -1783,21 +1803,21 @@ describe('hogvm execute', () => { 7, op.STRING, 'true1', - op.CALL, + op.CALL_GLOBAL, 'noisy_print', 1, op.JUMP, 5, op.STRING, 'false1', - op.CALL, + op.CALL_GLOBAL, 'noisy_print', 1, op.JUMP, 5, op.STRING, 'false2', - op.CALL, + op.CALL_GLOBAL, 'noisy_print', 1, op.RETURN, @@ -1823,13 +1843,13 @@ describe('hogvm execute', () => { op.POP, op.STRING, 'no', - op.CALL, + op.CALL_GLOBAL, 'noisy_print', 1, op.RETURN, op.STRING, 'post', - op.CALL, + op.CALL_GLOBAL, 'noisy_print', 1, op.POP, @@ -1840,7 +1860,7 @@ describe('hogvm execute', () => { test('uncaught exceptions', () => { // throw Error('Not a good day') - const bytecode1 = ['_h', op.NULL, op.NULL, op.STRING, 'Not a good day', op.CALL, 'Error', 3, op.THROW] + const bytecode1 = ['_h', op.NULL, op.NULL, op.STRING, 'Not a good day', op.CALL_GLOBAL, 'Error', 3, op.THROW] expect(() => execSync(bytecode1)).toThrow(new UncaughtHogVMException('Error', 'Not a good day', null)) // throw RetryError('Not a good day', {'key': 'value'}) @@ -1854,7 +1874,7 @@ describe('hogvm execute', () => { 1, op.STRING, 'Not a good day', - op.CALL, + op.CALL_GLOBAL, 'RetryError', 2, op.THROW, @@ -1863,4 +1883,39 @@ describe('hogvm execute', () => { new UncaughtHogVMException('RetryError', 'Not a good day', { key: 'value' }) ) }) + + test('returns serialized state', () => { + const bytecode = [ + '_h', + op.STRING, + 'key', + op.STRING, + 'value', + op.DICT, + 1, + op.GET_LOCAL, + 0, + op.CALL_GLOBAL, + 'fetch', + 1, + ] + expect(exec(bytecode, { asyncFunctions: { fetch: async () => null } })).toEqual({ + asyncFunctionArgs: [{ key: 'value' }], // not a Map + asyncFunctionName: 'fetch', + finished: false, + result: undefined, + state: { + asyncSteps: 1, + bytecode: bytecode, + callStack: [], + declaredFunctions: {}, + ip: 12, + maxMemUsed: 64, + ops: 5, + stack: [{ key: 'value' }], // is not a Map + syncDuration: 0, + throwStack: [], + }, + }) + }) }) diff --git a/hogvm/typescript/src/execute.ts b/hogvm/typescript/src/execute.ts index 389e1ded547f2..9f9bb5af6a422 100644 --- a/hogvm/typescript/src/execute.ts +++ b/hogvm/typescript/src/execute.ts @@ -84,10 +84,10 @@ export async function execAsync(bytecode: any[], options?: ExecOptions): Promise const result = await options?.asyncFunctions[response.asyncFunctionName]( ...response.asyncFunctionArgs.map(convertHogToJS) ) - vmState.stack.push(convertJSToHog(result)) + vmState.stack.push(result) } else if (response.asyncFunctionName in ASYNC_STL) { const result = await ASYNC_STL[response.asyncFunctionName]( - response.asyncFunctionArgs, + response.asyncFunctionArgs.map(convertHogToJS), response.asyncFunctionName, options?.timeout ?? DEFAULT_TIMEOUT_MS ) @@ -110,10 +110,10 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { } else { bytecode = code } - - if (!bytecode || bytecode.length === 0 || bytecode[0] !== '_h') { - throw new HogVMException("Invalid HogQL bytecode, must start with '_h'") + if (!bytecode || bytecode.length === 0 || (bytecode[0] !== '_h' && bytecode[0] !== '_H')) { + throw new HogVMException("Invalid HogQL bytecode, must start with '_H'") } + const version = bytecode[0] === '_H' ? 
bytecode[1] ?? 0 : 0 const startTime = Date.now() let temp: any @@ -123,7 +123,7 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { const asyncSteps = vmState ? vmState.asyncSteps : 0 const syncDuration = vmState ? vmState.syncDuration : 0 - const stack: any[] = vmState ? vmState.stack : [] + const stack: any[] = vmState ? vmState.stack.map(convertJSToHog) : [] const memStack: number[] = stack.map((s) => calculateCost(s)) const callStack: [number, number, number][] = vmState ? vmState.callStack : [] const throwStack: [number, number, number][] = vmState ? vmState.throwStack : [] @@ -131,7 +131,7 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { let memUsed = memStack.reduce((acc, val) => acc + val, 0) let maxMemUsed = Math.max(vmState ? vmState.maxMemUsed : 0, memUsed) const memLimit = options?.memoryLimit ?? DEFAULT_MAX_MEMORY - let ip = vmState ? vmState.ip : 1 + let ip = vmState ? vmState.ip : bytecode[0] === '_H' ? 2 : 1 let ops = vmState ? vmState.ops : 0 const timeout = options?.timeout ?? DEFAULT_TIMEOUT_MS const maxAsyncSteps = options?.maxAsyncSteps ?? DEFAULT_MAX_ASYNC_STEPS @@ -158,7 +158,9 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { memUsed -= memStack.splice(start, deleteCount).reduce((acc, val) => acc + val, 0) return stack.splice(start, deleteCount) } - function spliceStack1(start: number): any[] { + + /** Keep start elements, return those removed */ + function stackKeepFirstElements(start: number): any[] { memUsed -= memStack.splice(start).reduce((acc, val) => acc + val, 0) return stack.splice(start) } @@ -310,7 +312,11 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { for (let i = 0; i < count; i++) { chain.push(popStack()) } - pushStack(options?.globals ? convertJSToHog(getNestedValue(options.globals, chain)) : null) + if (options?.globals && chain[0] in options.globals && Object.hasOwn(options.globals, chain[0])) { + pushStack(convertJSToHog(getNestedValue(options.globals, chain))) + } else { + throw new HogVMException(`Global variable not found: ${chain.join('.')}`) + } break } case Operation.POP: @@ -320,7 +326,7 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { if (callStack.length > 0) { const [newIp, stackStart, _] = callStack.pop()! const response = popStack() - spliceStack1(stackStart) + stackKeepFirstElements(stackStart) pushStack(response) ip = newIp break @@ -400,7 +406,7 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { ip += bodyLength break } - case Operation.CALL: { + case Operation.CALL_GLOBAL: { checkTimeout() const name = next() // excluding "toString" only because of JavaScript --> no, it's not declared, it's omnipresent! o_O @@ -416,9 +422,14 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { if (temp > MAX_FUNCTION_ARGS_LENGTH) { throw new HogVMException('Too many arguments') } - const args = Array(temp) - .fill(null) - .map(() => popStack()) + + const args = + version === 0 + ? 
Array(temp) + .fill(null) + .map(() => popStack()) + : stackKeepFirstElements(stack.length - temp) + if (options?.functions && Object.hasOwn(options.functions, name) && options.functions[name]) { pushStack(convertJSToHog(options.functions[name](...args.map(convertHogToJS)))) } else if ( @@ -436,10 +447,10 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { result: undefined, finished: false, asyncFunctionName: name, - asyncFunctionArgs: args, + asyncFunctionArgs: args.map(convertHogToJS), state: { bytecode, - stack, + stack: stack.map(convertHogToJS), callStack, throwStack, declaredFunctions, @@ -475,7 +486,7 @@ export function exec(code: any[] | VMState, options?: ExecOptions): ExecResult { } if (throwStack.length > 0) { const [callStackLen, stackLen, catchIp] = throwStack.pop()! - spliceStack1(stackLen) + stackKeepFirstElements(stackLen) memUsed -= memStack.splice(stackLen).reduce((acc, val) => acc + val, 0) callStack.splice(callStackLen) pushStack(exception) diff --git a/hogvm/typescript/src/operation.ts b/hogvm/typescript/src/operation.ts index 2f8af6d275f56..4d4c5eee71a1a 100644 --- a/hogvm/typescript/src/operation.ts +++ b/hogvm/typescript/src/operation.ts @@ -1,6 +1,6 @@ export const enum Operation { GET_GLOBAL = 1, - CALL = 2, + CALL_GLOBAL = 2, AND = 3, OR = 4, NOT = 5, diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 19fbd13d69bd3..af30ae6589b0d 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0016_rolemembership_organization_member otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0457_datawarehousejoin_deleted_at_and_more +posthog: 0460_alertconfiguration_threshold_alertsubscription_and_more sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index e4f51c238f14e..4ec481c6b94fa 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -1,6 +1,7 @@ posthog/temporal/common/utils.py:0: error: Argument 1 to "abstractclassmethod" has incompatible type "Callable[[HeartbeatDetails, Any], Any]"; expected "Callable[[type[Never], Any], Any]" [arg-type] posthog/temporal/common/utils.py:0: note: This is likely because "from_activity" has named arguments: "cls". 
Consider marking them positional-only posthog/temporal/common/utils.py:0: error: Argument 2 to "__get__" of "classmethod" has incompatible type "type[HeartbeatType]"; expected "type[Never]" [arg-type] +posthog/tasks/exports/ordered_csv_renderer.py:0: error: No return value expected [return-value] posthog/warehouse/models/ssh_tunnel.py:0: error: Incompatible types in assignment (expression has type "NoEncryption", variable has type "BestAvailableEncryption") [assignment] posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item] @@ -26,10 +27,32 @@ posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Un posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] +posthog/utils.py:0: error: No overload variant of "asdict" matches argument type "type[DataclassInstance]" [call-overload] +posthog/utils.py:0: note: Possible overload variants: +posthog/utils.py:0: note: def asdict(obj: DataclassInstance) -> dict[str, Any] +posthog/utils.py:0: note: def [_T] asdict(obj: DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T +posthog/utils.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/settings/data_stores.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] +posthog/settings/data_stores.py:0: error: Name "DATABASE_URL" already defined on line 0 [no-redef] +posthog/plugins/utils.py:0: error: Subclass of "str" and "bytes" cannot exist: would have incompatible method signatures [unreachable] +posthog/plugins/utils.py:0: error: Statement is unreachable [unreachable] +posthog/models/dashboard.py:0: error: Need type annotation for "insights" [var-annotated] posthog/hogql/database/schema/numbers.py:0: error: Incompatible types in assignment (expression has type "dict[str, IntegerDatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment] posthog/hogql/database/schema/numbers.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/database/schema/numbers.py:0: note: Consider using "Mapping" instead, which is covariant in the value type posthog/hogql/ast.py:0: error: Incompatible return value type (got "bool | None", expected "bool") [return-value] +posthog/warehouse/data_load/service.py:0: error: Unsupported operand types for >= ("timedelta" and "None") [operator] +posthog/warehouse/data_load/service.py:0: note: Left operand is of type "timedelta | None" 
+posthog/warehouse/data_load/service.py:0: error: Incompatible return value type (got "tuple[timedelta | None, timedelta]", expected "tuple[timedelta, timedelta]") [return-value] +posthog/warehouse/data_load/service.py:0: error: Unsupported operand types for >= ("timedelta" and "None") [operator] +posthog/warehouse/data_load/service.py:0: note: Left operand is of type "timedelta | None" +posthog/warehouse/data_load/service.py:0: error: Incompatible return value type (got "tuple[timedelta | None, timedelta]", expected "tuple[timedelta, timedelta]") [return-value] +posthog/warehouse/data_load/service.py:0: error: Incompatible return value type (got "tuple[timedelta | None, timedelta]", expected "tuple[timedelta, timedelta]") [return-value] +posthog/tasks/tasks.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] +posthog/models/subscription.py:0: error: Argument 2 to "SubscriptionResourceInfo" has incompatible type "str | None"; expected "str" [arg-type] +posthog/models/exported_asset.py:0: error: Value of type variable "_StrOrPromiseT" of "slugify" cannot be "str | None" [type-var] +posthog/models/action/action.py:0: error: Need type annotation for "events" [var-annotated] +posthog/models/action/action.py:0: error: Argument 1 to "len" has incompatible type "str | None"; expected "Sized" [arg-type] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "CTE") [assignment] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "CTE") [assignment] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "CTE") [assignment] @@ -38,46 +61,35 @@ posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "WindowExpr", variable has type "CTE") [assignment] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "FieldAliasType", variable has type "BaseTableType | SelectUnionQueryType | SelectQueryType | SelectQueryAliasType | SelectViewType") [assignment] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "Type", variable has type "BaseTableType | SelectUnionQueryType | SelectQueryType | SelectQueryAliasType | SelectViewType") [assignment] +ee/models/license.py:0: error: Incompatible return value type (got "_T", expected "License | None") [return-value] +ee/models/license.py:0: error: Cannot use a covariant type variable as a parameter [misc] +ee/models/license.py:0: error: "_T" has no attribute "plan" [attr-defined] +ee/models/license.py:0: error: Incompatible return value type (got "str | bool", expected "bool") [return-value] +ee/models/explicit_team_membership.py:0: error: Incompatible return value type (got "int", expected "Level") [return-value] posthog/hogql/resolver_utils.py:0: error: Argument 1 to "lookup_field_by_name" has incompatible type "SelectQueryType | SelectUnionQueryType"; expected "SelectQueryType" [arg-type] +posthog/models/filters/mixins/simplify.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/hogql/parser.py:0: error: Item "None" of "list[Expr] | None" has no attribute "__iter__" (not iterable) [union-attr] posthog/hogql/parser.py:0: error: "None" has no attribute "text" [attr-defined] 
posthog/hogql/parser.py:0: error: "None" has no attribute "text" [attr-defined] posthog/hogql/parser.py:0: error: Statement is unreachable [unreachable] +posthog/helpers/dashboard_templates.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str | Combinable") [assignment] +posthog/hogql/functions/cohort.py:0: error: Incompatible type for lookup 'team_id': (got "int | None", expected "str | int") [misc] +posthog/hogql/functions/cohort.py:0: error: Incompatible type for lookup 'team_id': (got "int | None", expected "str | int") [misc] +posthog/hogql/database/schema/persons_pdi.py:0: error: Incompatible types in assignment (expression has type "Organization | None", variable has type "Organization") [assignment] posthog/hogql/database/schema/groups.py:0: error: Incompatible types in assignment (expression has type "dict[str, DatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment] posthog/hogql/database/schema/groups.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/database/schema/groups.py:0: note: Consider using "Mapping" instead, which is covariant in the value type posthog/hogql/database/schema/groups.py:0: error: Incompatible types in assignment (expression has type "dict[str, DatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment] posthog/hogql/database/schema/groups.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/database/schema/groups.py:0: note: Consider using "Mapping" instead, which is covariant in the value type -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_ENABLED' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_ENDPOINT' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_REGION' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_BUCKET' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_BUCKET' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_BUCKET' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_BUCKET' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_BUCKET' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_BUCKET' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_BUCKET' [misc] -posthog/storage/object_storage.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_BUCKET' [misc] -posthog/redis.py:0: error: Import cycle from Django settings module prevents type inference for 'REDIS_URL' [misc] -posthog/plugins/utils.py:0: error: Subclass of "str" and "bytes" cannot exist: would have incompatible method signatures [unreachable] -posthog/plugins/utils.py:0: 
error: Statement is unreachable [unreachable] -posthog/clickhouse/kafka_engine.py:0: error: Import cycle from Django settings module prevents type inference for 'KAFKA_HOSTS_FOR_CLICKHOUSE' [misc] -posthog/utils.py:0: error: Import cycle from Django settings module prevents type inference for 'PERSISTED_FEATURE_FLAGS' [misc] -posthog/utils.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_ENABLED' [misc] -posthog/utils.py:0: error: Name "timezone.datetime" is not defined [name-defined] -posthog/utils.py:0: error: Import cycle from Django settings module prevents type inference for 'DECIDE_TRACK_TEAM_IDS' [misc] -posthog/models/dashboard.py:0: error: Need type annotation for "insights" [var-annotated] -posthog/tasks/tasks.py:0: error: Import cycle from Django settings module prevents type inference for 'INGESTION_LAG_METRIC_TEAM_IDS' [misc] -posthog/tasks/tasks.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] -posthog/warehouse/s3.py:0: error: Import cycle from Django settings module prevents type inference for 'OBJECT_STORAGE_ENDPOINT' [misc] -posthog/settings/data_stores.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] -posthog/settings/data_stores.py:0: error: Name "DATABASE_URL" already defined on line 0 [no-redef] -posthog/models/filters/mixins/simplify.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] +posthog/hogql/database/schema/persons.py:0: error: Incompatible types in assignment (expression has type "Organization | None", variable has type "Organization") [assignment] +posthog/batch_exports/service.py:0: error: Argument 4 to "backfill_export" has incompatible type "datetime | None"; expected "datetime" [arg-type] posthog/models/filters/base_filter.py:0: error: "HogQLContext" has no attribute "person_on_events_mode" [attr-defined] posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] +posthog/models/hog_functions/hog_function.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] +posthog/models/hog_functions/hog_function.py:0: error: Argument 2 to "get_hog_function_status" has incompatible type "UUID"; expected "str" [arg-type] +posthog/models/hog_functions/hog_function.py:0: error: Argument 2 to "patch_hog_function_status" has incompatible type "UUID"; expected "str" [arg-type] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "type[User]", base class "BaseManager" defined the type as "type[_T]") [assignment] posthog/models/user.py:0: error: Cannot override class variable (previously declared on base class "AbstractBaseUser") with instance variable [misc] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "None", base class "AbstractUser" defined the type as "CharField[str | int | Combinable, str]") [assignment] @@ -91,30 +103,13 @@ posthog/models/user.py:0: note: Subclass: posthog/models/user.py:0: note: bool posthog/models/user.py:0: error: "User" has no attribute "social_auth" [attr-defined] posthog/models/user.py:0: error: "User" has no attribute "social_auth" [attr-defined] +posthog/models/plugin.py:0: error: Argument 1 to "extract_plugin_code" has incompatible type "bytes | memoryview | None"; expected "bytes" [arg-type] 
posthog/models/plugin.py:0: error: Name "timezone.datetime" is not defined [name-defined] posthog/models/plugin.py:0: error: Name "timezone.datetime" is not defined [name-defined] posthog/models/person/person.py:0: error: "_T" has no attribute "_add_distinct_ids" [attr-defined] -posthog/models/person/person.py:0: error: Incompatible types in assignment (expression has type "list[Never]", variable has type "_QuerySet[PersonDistinctId, str]") [assignment] -posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] -posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Argument 1 to "create_hogql_database" has incompatible type "int | None"; expected "int" [arg-type] -posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment] -posthog/models/feature_flag/flag_matching.py:0: error: Statement is unreachable [unreachable] -posthog/models/feature_flag/flag_matching.py:0: error: Value of type variable "_E" of "ExpressionWrapper" cannot be "object" [type-var] -posthog/models/feature_flag/flag_matching.py:0: error: Value of type variable "_E" of "ExpressionWrapper" cannot be "object" [type-var] -posthog/models/feature_flag/flag_matching.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] -posthog/models/feature_flag/flag_matching.py:0: error: Value of type variable "_E" of "ExpressionWrapper" cannot be "object" [type-var] -ee/models/license.py:0: error: Incompatible return value type (got "_T", expected "License | None") [return-value] -ee/models/license.py:0: error: Cannot use a covariant type variable as a parameter [misc] -ee/models/license.py:0: error: "_T" has no attribute "plan" [attr-defined] -posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "dict[str, int] | None" has no attribute "get" [union-attr] -posthog/hogql_queries/utils/query_date_range.py:0: error: Unsupported operand types for * ("object" and "int") [operator] -posthog/hogql_queries/utils/query_date_range.py:0: error: Incompatible return value type (got "int", expected "timedelta") [return-value] -posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "IntervalType | None" has no attribute "name" [union-attr] -posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "IntervalType | None" has no attribute "name" [union-attr] -posthog/hogql_queries/utils/query_date_range.py:0: error: Incompatible default for argument "source" (default has type "None", argument has type "Expr") [assignment] -posthog/hogql_queries/utils/query_date_range.py:0: note: PEP 484 prohibits implicit Optional. 
Accordingly, mypy has changed its default to no_implicit_optional=True -posthog/hogql_queries/utils/query_date_range.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase -posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "IntervalType | None" has no attribute "name" [union-attr] +posthog/models/person/person.py:0: error: Argument "version" to "create_person" has incompatible type "int | None"; expected "int" [arg-type] +posthog/models/person/person.py:0: error: Incompatible types in assignment (expression has type "list[Never]", variable has type "QuerySet[PersonDistinctId, str]") [assignment] +posthog/models/organization_invite.py:0: error: Argument "level" to "join" of "User" has incompatible type "int"; expected "Level" [arg-type] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 4 has incompatible type "str": "Literal[0, 1, 2, 3, 4] | None"; expected "str": "str | None" [dict-item] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Item "None" of "Any | None" has no attribute "__iter__" (not iterable) [union-attr] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 1 to "float" has incompatible type "Any | None"; expected "str | Buffer | SupportsFloat | SupportsIndex" [arg-type] @@ -126,67 +121,65 @@ posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 0 has incompatible type "str": "PathsFilter"; expected "str": "TrendsFilter" [dict-item] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 0 has incompatible type "str": "LifecycleFilter"; expected "str": "TrendsFilter" [dict-item] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 0 has incompatible type "str": "StickinessFilter"; expected "str": "TrendsFilter" [dict-item] -posthog/hogql/functions/cohort.py:0: error: Incompatible type for lookup 'team_id': (got "int | None", expected "str | int") [misc] -posthog/hogql/functions/cohort.py:0: error: Incompatible type for lookup 'team_id': (got "int | None", expected "str | int") [misc] -posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] -posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] -posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] -posthog/api/utils.py:0: error: Incompatible types in assignment (expression has type "type[EventDefinition]", variable has type "type[EnterpriseEventDefinition]") [assignment] -posthog/api/utils.py:0: error: Argument 1 to "UUID" has incompatible type "int | str"; expected "str | None" [arg-type] -ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] -ee/billing/quota_limiting.py:0: error: "object" has no attribute "get" [attr-defined] -ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] -ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] -posthog/tasks/email.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: List item 1 has incompatible type "str | None"; expected "str" [list-item] 
-posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: note: Consider using "Sequence" instead, which is covariant -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: List item 1 has incompatible type "str | None"; expected "str" [list-item] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Argument "chain" to "Field" has incompatible type "list[str | int] | list[str]"; expected "list[str | int]" [arg-type] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select" [union-attr] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select" [union-attr] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "group_by" [union-attr] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "None" of "list[Expr] | None" has no attribute "append" [union-attr] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select" [union-attr] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "group_by" [union-attr] -posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "None" of "list[Expr] | Any | None" has no attribute "append" [union-attr] -posthog/hogql/resolver.py:0: error: Argument 1 of "visit" is incompatible with supertype "Visitor"; supertype defines the argument type as "AST | None" [override] -posthog/hogql/resolver.py:0: note: This violates the Liskov substitution principle -posthog/hogql/resolver.py:0: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides +posthog/session_recordings/models/session_recording.py:0: error: Argument "distinct_id" to "MissingPerson" has incompatible type "str | None"; expected "str" [arg-type] +posthog/session_recordings/models/session_recording.py:0: error: Incompatible type for lookup 'persondistinctid__team_id': (got "Team", expected "str | int") [misc] +ee/tasks/subscriptions/slack_subscriptions.py:0: error: Item "None" of "datetime | None" has no attribute "strftime" [union-attr] +posthog/warehouse/models/table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_key" [union-attr] +posthog/warehouse/models/table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_secret" [union-attr] +posthog/warehouse/models/table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_key" [union-attr] +posthog/warehouse/models/table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_secret" [union-attr] +posthog/warehouse/models/table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_key" [union-attr] +posthog/warehouse/models/table.py:0: 
error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_secret" [union-attr] +posthog/queries/util.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/models/cohort/cohort.py:0: error: Need type annotation for "people" [var-annotated] +posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "dict[str, int] | None" has no attribute "get" [union-attr] +posthog/hogql_queries/utils/query_date_range.py:0: error: Unsupported operand types for * ("object" and "int") [operator] +posthog/hogql_queries/utils/query_date_range.py:0: error: Incompatible return value type (got "int", expected "timedelta") [return-value] +posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "IntervalType | None" has no attribute "name" [union-attr] +posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "IntervalType | None" has no attribute "name" [union-attr] +posthog/hogql_queries/utils/query_date_range.py:0: error: Incompatible default for argument "source" (default has type "None", argument has type "Expr") [assignment] +posthog/hogql_queries/utils/query_date_range.py:0: note: PEP 484 prohibits implicit Optional. Accordingly, mypy has changed its default to no_implicit_optional=True +posthog/hogql_queries/utils/query_date_range.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase +posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "IntervalType | None" has no attribute "name" [union-attr] posthog/hogql/resolver.py:0: error: List comprehension has incompatible type List[SelectQueryType | None]; expected List[SelectQueryType] [misc] -posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "JoinExpr | None") [assignment] posthog/hogql/resolver.py:0: error: Need type annotation for "columns_with_visible_alias" (hint: "columns_with_visible_alias: dict[, ] = ...") [var-annotated] -posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Type | None", target has type "Type") [assignment] -posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Type | None", target has type "Type") [assignment] -posthog/hogql/resolver.py:0: error: List comprehension has incompatible type List[Expr]; expected List[OrderExpr] [misc] -posthog/hogql/resolver.py:0: error: Value expression in dictionary comprehension has incompatible type "Expr"; expected type "WindowExpr" [misc] posthog/hogql/resolver.py:0: error: Statement is unreachable [unreachable] posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery | Field | None") [assignment] posthog/hogql/resolver.py:0: error: Item "None" of "Database | None" has no attribute "get_table" [union-attr] -posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "JoinExpr") [assignment] posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "TableType", variable has type "LazyTableType") [assignment] posthog/hogql/resolver.py:0: error: Argument "table_type" to "TableAliasType" has incompatible type "LazyTableType"; expected "TableType" [arg-type] posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "LazyTableType", variable has type "TableAliasType") [assignment] posthog/hogql/resolver.py:0: 
error: Argument 1 to "clone_expr" has incompatible type "SelectQuery | SelectUnionQuery | Field | None"; expected "Expr" [arg-type] -posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "JoinExpr | None") [assignment] posthog/hogql/resolver.py:0: error: Statement is unreachable [unreachable] posthog/hogql/resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "join_type" [union-attr] -posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SampleExpr | None") [assignment] posthog/hogql/resolver.py:0: error: Argument "select_query_type" to "SelectViewType" has incompatible type "SelectQueryType | None"; expected "SelectQueryType | SelectUnionQueryType" [arg-type] posthog/hogql/resolver.py:0: error: Item "None" of "SelectQuery | SelectUnionQuery | Field | None" has no attribute "type" [union-attr] posthog/hogql/resolver.py:0: error: Argument "select_query_type" to "SelectQueryAliasType" has incompatible type "Type | Any | None"; expected "SelectQueryType | SelectUnionQueryType" [arg-type] posthog/hogql/resolver.py:0: error: Item "None" of "SelectQuery | SelectUnionQuery | Field | None" has no attribute "type" [union-attr] posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Type | Any | None", variable has type "BaseTableType | SelectUnionQueryType | SelectQueryType | SelectQueryAliasType | SelectViewType | None") [assignment] posthog/hogql/resolver.py:0: error: Argument 1 to "append" of "list" has incompatible type "BaseTableType | SelectUnionQueryType | SelectQueryType | SelectQueryAliasType | SelectViewType | None"; expected "SelectQueryType | SelectUnionQueryType" [arg-type] -posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "JoinExpr | None") [assignment] -posthog/hogql/resolver.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SampleExpr | None") [assignment] posthog/hogql/resolver.py:0: error: Statement is unreachable [unreachable] posthog/hogql/resolver.py:0: error: Statement is unreachable [unreachable] posthog/hogql/resolver.py:0: error: Item "None" of "Type | None" has no attribute "resolve_constant_type" [union-attr] posthog/hogql/resolver.py:0: error: Item "None" of "Type | None" has no attribute "resolve_constant_type" [union-attr] posthog/hogql/resolver.py:0: error: Item "None" of "Type | None" has no attribute "resolve_constant_type" [union-attr] -posthog/demo/matrix/taxonomy_inference.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/models/feature_flag/feature_flag.py:0: error: Need type annotation for "analytics_dashboards" [var-annotated] +posthog/models/feature_flag/feature_flag.py:0: error: Unsupported right operand type for in ("str | Any | None") [operator] +posthog/models/feature_flag/feature_flag.py:0: error: Item "None" of "Insight | None" has no attribute "name" [union-attr] +posthog/models/feature_flag/feature_flag.py:0: error: Incompatible types in assignment (expression has type "User | None", variable has type "User | AnonymousUser") [assignment] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: List item 1 has incompatible type "str | None"; expected "str" [list-item] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | 
int]" [arg-type] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: note: Consider using "Sequence" instead, which is covariant +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: List item 1 has incompatible type "str | None"; expected "str" [list-item] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Argument "chain" to "Field" has incompatible type "list[str | int] | list[str]"; expected "list[str | int]" [arg-type] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select" [union-attr] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select" [union-attr] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "group_by" [union-attr] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "None" of "list[Expr] | None" has no attribute "append" [union-attr] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select" [union-attr] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "group_by" [union-attr] +posthog/hogql_queries/insights/trends/aggregation_operations.py:0: error: Item "None" of "list[Expr] | Any | None" has no attribute "append" [union-attr] posthog/hogql/transforms/lazy_tables.py:0: error: Incompatible types in assignment (expression has type "dict[Never, Never]", variable has type "list[ConstraintOverride]") [assignment] posthog/hogql/transforms/lazy_tables.py:0: error: Non-overlapping equality check (left operand type: "TableType", right operand type: "LazyTableType") [comparison-overlap] posthog/hogql/transforms/lazy_tables.py:0: error: Non-overlapping equality check (left operand type: "TableType", right operand type: "LazyTableType") [comparison-overlap] @@ -213,6 +206,16 @@ posthog/hogql/transforms/in_cohort.py:0: error: Item "None" of "JoinConstraint | posthog/hogql/transforms/in_cohort.py:0: error: Item "Expr" of "Expr | Any" has no attribute "right" [union-attr] posthog/hogql/transforms/in_cohort.py:0: error: List item 0 has incompatible type "SelectQueryType | None"; expected "SelectQueryType" [list-item] posthog/hogql/transforms/in_cohort.py:0: error: List item 0 has incompatible type "SelectQueryType | None"; expected "SelectQueryType" [list-item] +posthog/hogql/database/database.py:0: error: Argument "week_start_day" to "Database" has incompatible type "int | Any | None"; expected "WeekStartDay | None" [arg-type] +posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] +posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Argument 1 to "create_hogql_database" has incompatible type "int | None"; expected "int" [arg-type] +posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment] +posthog/models/feature_flag/flag_matching.py:0: 
error: Statement is unreachable [unreachable] +posthog/models/feature_flag/flag_matching.py:0: error: Value expression in dictionary comprehension has incompatible type "int"; expected type "Literal[0, 1, 2, 3, 4]" [misc] +posthog/models/feature_flag/flag_matching.py:0: error: Value of type variable "_E" of "ExpressionWrapper" cannot be "object" [type-var] +posthog/models/feature_flag/flag_matching.py:0: error: Value of type variable "_E" of "ExpressionWrapper" cannot be "object" [type-var] +posthog/models/feature_flag/flag_matching.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] +posthog/models/feature_flag/flag_matching.py:0: error: Value of type variable "_E" of "ExpressionWrapper" cannot be "object" [type-var] posthog/hogql/printer.py:0: error: Argument 1 to "create_hogql_database" has incompatible type "int | None"; expected "int" [arg-type] posthog/hogql/printer.py:0: error: List comprehension has incompatible type List[SelectQueryType | None]; expected List[SelectQueryType] [misc] posthog/hogql/printer.py:0: error: Argument "stack" to "_Printer" has incompatible type "list[SelectQuery]"; expected "list[AST] | None" [arg-type] @@ -232,7 +235,42 @@ posthog/hogql/printer.py:0: error: "FieldOrTable" has no attribute "name" [attr posthog/hogql/printer.py:0: error: "FieldOrTable" has no attribute "name" [attr-defined] posthog/hogql/printer.py:0: error: Argument 2 to "_get_materialized_column" of "_Printer" has incompatible type "str | int"; expected "str" [arg-type] posthog/hogql/printer.py:0: error: Argument 1 to "_print_identifier" of "_Printer" has incompatible type "str | None"; expected "str" [arg-type] -posthog/api/action.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] +posthog/user_permissions.py:0: error: Key expression in dictionary comprehension has incompatible type "UUID"; expected type "int" [misc] +posthog/user_permissions.py:0: error: Incompatible return value type (got "int", expected "Level | None") [return-value] +posthog/user_permissions.py:0: error: Incompatible return value type (got "int", expected "Level | None") [return-value] +posthog/user_permissions.py:0: error: Incompatible return value type (got "int", expected "RestrictionLevel") [return-value] +posthog/tasks/warehouse.py:0: error: Argument 1 to "cancel_external_data_workflow" has incompatible type "str | None"; expected "str" [arg-type] +posthog/tasks/update_survey_iteration.py:0: error: Incompatible types in assignment (expression has type "ForeignKey[Any, _ST] | Any", variable has type "FeatureFlag | Combinable | None") [assignment] +posthog/tasks/update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] +posthog/tasks/update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] +posthog/tasks/update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "save" [union-attr] +posthog/tasks/update_survey_iteration.py:0: error: Incompatible type for "key" of "FeatureFlag" (got "UUID", expected "str | int | Combinable") [misc] +posthog/permissions.py:0: error: Argument 2 to "feature_enabled" has incompatible type "str | None"; expected "str" [arg-type] +posthog/models/event/util.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "datetime") [assignment] +posthog/models/event/util.py:0: error: Module has no attribute 
"utc" [attr-defined] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "alias" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/event_usage.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/demo/matrix/taxonomy_inference.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/api/shared.py:0: error: Incompatible return value type (got "int | None", expected "Level | None") [return-value] +ee/billing/quota_limiting.py:0: error: Argument 2 to "feature_enabled" has incompatible type "UUID"; expected "str" [arg-type] +ee/billing/quota_limiting.py:0: error: List comprehension has incompatible type List[int]; expected List[str] [misc] +ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] +ee/billing/quota_limiting.py:0: error: "object" has no attribute "get" [attr-defined] +ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] +ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] posthog/test/base.py:0: error: Module has no attribute "configure" [attr-defined] posthog/test/base.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Organization") [assignment] posthog/test/base.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Project") [assignment] @@ -248,23 +286,48 @@ posthog/test/base.py:0: error: Item "None" of "FrameType | None" has no attribut posthog/test/base.py:0: error: Item "None" of "FrameType | Any | None" has no attribute "f_locals" [union-attr] posthog/test/base.py:0: error: Item "None" of "FrameType | None" has no attribute "f_back" [union-attr] posthog/test/base.py:0: error: Item "None" of "FrameType | Any | None" has no attribute "f_locals" [union-attr] +posthog/tasks/email.py:0: error: Item "None" of "User | None" has no attribute "first_name" [union-attr] +posthog/tasks/email.py:0: error: Module 
"django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] +posthog/tasks/email.py:0: error: Argument "email" to "add_recipient" of "EmailMessage" has incompatible type "str | None"; expected "str" [arg-type] +posthog/tasks/email.py:0: error: Argument 1 to "capture" has incompatible type "str | None"; expected "str" [arg-type] +posthog/tasks/email.py:0: error: Incompatible types in assignment (expression has type "Team | None", variable has type "Team") [assignment] +posthog/api/documentation.py:0: error: Signature of "run_validation" incompatible with supertype "Field" [override] +posthog/api/documentation.py:0: note: Superclass: +posthog/api/documentation.py:0: note: def run_validation(self, data: Any = ...) -> Any +posthog/api/documentation.py:0: note: Subclass: +posthog/api/documentation.py:0: note: def run_validation(self, data: Any) -> Any +ee/tasks/subscriptions/email_subscriptions.py:0: error: Item "None" of "User | None" has no attribute "email" [union-attr] +ee/tasks/subscriptions/email_subscriptions.py:0: error: Item "None" of "datetime | None" has no attribute "isoformat" [union-attr] +ee/tasks/subscriptions/email_subscriptions.py:0: error: Item "None" of "datetime | None" has no attribute "strftime" [union-attr] +ee/tasks/subscriptions/email_subscriptions.py:0: error: Item "None" of "User | None" has no attribute "first_name" [union-attr] +ee/billing/billing_manager.py:0: error: Module has no attribute "utc" [attr-defined] +ee/billing/billing_manager.py:0: error: Cannot resolve keyword 'distinct_id' into field. Choices are: explicit_team_membership, id, joined_at, level, organization, organization_id, role_membership, updated_at, user, user_id [misc] +ee/billing/billing_manager.py:0: error: Cannot resolve keyword 'email' into field. 
Choices are: explicit_team_membership, id, joined_at, level, organization, organization_id, role_membership, updated_at, user, user_id [misc] +ee/billing/billing_manager.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "bool | Combinable | None") [assignment] posthog/models/property/util.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/models/property/util.py:0: error: Argument 3 to "format_filter_query" has incompatible type "HogQLContext | None"; expected "HogQLContext" [arg-type] posthog/models/property/util.py:0: error: Argument 3 to "format_cohort_subquery" has incompatible type "HogQLContext | None"; expected "HogQLContext" [arg-type] posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has incompatible type "str | int"; expected "str" [arg-type] posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has incompatible type "str | int"; expected "str" [arg-type] posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has incompatible type "str | int"; expected "str" [arg-type] +posthog/api/utils.py:0: error: Incompatible types in assignment (expression has type "type[EventDefinition]", variable has type "type[EnterpriseEventDefinition]") [assignment] +posthog/api/utils.py:0: error: Argument 1 to "UUID" has incompatible type "int | str"; expected "str | None" [arg-type] +posthog/api/email_verification.py:0: error: Argument 2 to "feature_enabled" has incompatible type "UUID"; expected "str" [arg-type] posthog/queries/trends/util.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | None"; expected "str" [arg-type] posthog/hogql/property.py:0: error: Incompatible type for lookup 'id': (got "str | int | list[str]", expected "str | int") [misc] posthog/hogql/property.py:0: error: Incompatible type for lookup 'pk': (got "str | float", expected "str | int") [misc] +posthog/api/routing.py:0: error: Incompatible return value type (got "Project | None", expected "Project") [return-value] +posthog/api/capture.py:0: error: Module has no attribute "utc" [attr-defined] posthog/hogql/filters.py:0: error: Incompatible default for argument "team" (default has type "None", argument has type "Team") [assignment] posthog/hogql/filters.py:0: note: PEP 484 prohibits implicit Optional. 
Accordingly, mypy has changed its default to no_implicit_optional=True posthog/hogql/filters.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase +posthog/api/organization.py:0: error: Incompatible return value type (got "int | None", expected "Level | None") [return-value] posthog/hogql/query.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "str | SelectQuery | SelectUnionQuery") [assignment] posthog/hogql/query.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment] posthog/hogql/query.py:0: error: Argument 1 to "get_default_limit_for_context" has incompatible type "LimitContext | None"; expected "LimitContext" [arg-type] posthog/hogql/query.py:0: error: "SelectQuery" has no attribute "select_queries" [attr-defined] posthog/hogql/query.py:0: error: Subclass of "SelectQuery" and "SelectUnionQuery" cannot exist: would have incompatible method signatures [unreachable] +posthog/api/action.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] posthog/queries/person_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/queries/event_query/event_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/hogql_queries/sessions_timeline_query_runner.py:0: error: Statement is unreachable [unreachable] @@ -273,9 +336,17 @@ posthog/hogql_queries/hogql_query_runner.py:0: error: Incompatible return value posthog/hogql_queries/events_query_runner.py:0: error: Statement is unreachable [unreachable] posthog/queries/breakdown_props.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | int"; expected "str" [arg-type] posthog/queries/breakdown_props.py:0: error: Incompatible type for lookup 'pk': (got "str | None", expected "str | int") [misc] +posthog/queries/breakdown_props.py:0: error: Incompatible return value type (got "str | None", expected "str") [return-value] +posthog/queries/actor_base_query.py:0: error: Incompatible types (expression has type "datetime", TypedDict item "created_at" has type "str | None") [typeddict-item] +posthog/queries/actor_base_query.py:0: error: Incompatible types (expression has type "datetime", TypedDict item "created_at" has type "str | None") [typeddict-item] posthog/hogql_queries/insights/funnels/base.py:0: error: Incompatible type for lookup 'pk': (got "str | int | None", expected "str | int") [misc] posthog/queries/foss_cohort_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/queries/funnels/base.py:0: error: "HogQLContext" has no attribute "person_on_events_mode" [attr-defined] +posthog/queries/funnels/base.py:0: error: Incompatible types in assignment (expression has type "int | str | None", variable has type "str | None") [assignment] +posthog/queries/funnels/base.py:0: error: Unsupported right operand type for in ("object") [operator] +posthog/queries/funnels/base.py:0: error: "object" has no attribute "append" [attr-defined] +posthog/queries/funnels/base.py:0: error: Unsupported right operand type for in ("object") [operator] +posthog/queries/funnels/base.py:0: error: "object" has no attribute "append" [attr-defined] posthog/queries/funnels/base.py:0: error: Argument 1 to "translate_hogql" has incompatible type 
"str | int"; expected "str" [arg-type] posthog/queries/trends/trends_actors.py:0: error: Incompatible type for lookup 'pk': (got "str | None", expected "str | int") [misc] posthog/queries/paths/paths_actors.py:0: error: Incompatible types in assignment (expression has type "str", target has type "int | list[str] | None") [assignment] @@ -283,17 +354,29 @@ posthog/queries/paths/paths_actors.py:0: error: Incompatible types in assignment posthog/queries/paths/paths_actors.py:0: error: Incompatible types in assignment (expression has type "str", target has type "int | list[str] | None") [assignment] ee/clickhouse/queries/funnels/funnel_correlation.py:0: error: Statement is unreachable [unreachable] posthog/api/insight.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] -posthog/api/dashboards/dashboard.py:0: error: Argument 1 to "dashboard_queryset" of "DashboardTile" has incompatible type "DashboardTile_RelatedManager"; expected "_QuerySet[Any, Any]" [arg-type] +posthog/api/dashboards/dashboard.py:0: error: Argument 1 to "dashboard_queryset" of "DashboardTile" has incompatible type "DashboardTile_RelatedManager"; expected "QuerySet[Any, Any]" [arg-type] posthog/api/person.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] +posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Argument 1 to "loads" has incompatible type "str | None"; expected "str | bytes | bytearray" [arg-type] +posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type] +posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type] +posthog/api/person.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/person.py:0: error: Cannot determine type of "group_properties_filter_group" [has-type] +posthog/caching/insight_caching_state.py:0: error: Argument "params" to "execute" of "CursorWrapper" has incompatible type "list[object]"; expected "Sequence[bool | int | float | Decimal | str | <6 more items> | None] | Mapping[str, bool | int | float | Decimal | str | <6 more items> | None] | None" [arg-type] posthog/api/cohort.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] +posthog/api/cohort.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] +posthog/api/cohort.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/cohort.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] +posthog/caching/insights_api.py:0: error: Unsupported operand types for >= ("datetime" and "None") [operator] +posthog/caching/insights_api.py:0: note: Right operand is of type "datetime | None" posthog/api/feature_flag.py:0: error: Item 
"Sequence[Any]" of "Any | Sequence[Any] | None" has no attribute "filters" [union-attr] posthog/api/feature_flag.py:0: error: Item "None" of "Any | Sequence[Any] | None" has no attribute "filters" [union-attr] posthog/api/feature_flag.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] +posthog/api/feature_flag.py:0: error: Argument 2 to "get_all_feature_flags" has incompatible type "str | None"; expected "str" [arg-type] +posthog/api/feature_flag.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] +posthog/api/feature_flag.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/hogql_queries/web_analytics/web_analytics_query_runner.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/hogql_queries/web_analytics/web_analytics_query_runner.py:0: error: Argument 1 to "append" of "list" has incompatible type "EventPropertyFilter"; expected "Expr" [arg-type] posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] @@ -317,15 +400,24 @@ posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Item "SelectU posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "sample" [union-attr] posthog/hogql_queries/insights/funnels/funnels_query_runner.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/survey.py:0: error: Incompatible types in assignment (expression has type "Any | Sequence[Any] | None", variable has type "Survey | None") [assignment] +posthog/api/survey.py:0: error: Argument "item_id" to "log_activity" has incompatible type "UUID"; expected "int | str | UUIDT | None" [arg-type] +posthog/api/survey.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] posthog/api/survey.py:0: error: Item "list[_ErrorFullDetails]" of "_FullDetailDict | list[_ErrorFullDetails] | dict[str, _ErrorFullDetails]" has no attribute "get" [union-attr] posthog/api/survey.py:0: error: Item "object" of "object | Any" has no attribute "__iter__" (not iterable) [union-attr] posthog/hogql_queries/web_analytics/web_overview.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/hogql_queries/web_analytics/top_clicks.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] +posthog/api/user.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/user.py:0: error: Module has no attribute "utc" [attr-defined] posthog/api/user.py:0: error: "User" has no attribute "social_auth" [attr-defined] +ee/clickhouse/queries/related_actors_query.py:0: error: Argument 1 to "_query_related_groups" of "RelatedActorsQuery" has incompatible type "int"; expected "Literal[0, 1, 2, 3, 4]" [arg-type] ee/api/test/base.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "License") [assignment] ee/api/test/base.py:0: error: "setUpTestData" undefined in superclass [misc] posthog/warehouse/external_data_source/jobs.py:0: error: Incompatible types in assignment (expression has type "str", variable has type 
"Status") [assignment] posthog/warehouse/external_data_source/jobs.py:0: error: Incompatible type for lookup 'id': (got "UUID | None", expected "UUID | str") [misc] +posthog/warehouse/api/test/test_table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_key" [union-attr] +posthog/warehouse/api/test/test_table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_secret" [union-attr] +posthog/warehouse/api/test/test_table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_key" [union-attr] +posthog/warehouse/api/test/test_table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_secret" [union-attr] posthog/test/test_utils.py:0: error: Argument 5 to "post" of "_RequestFactory" has incompatible type "**dict[str, str]"; expected "Mapping[str, Any] | None" [arg-type] posthog/test/test_plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/test/test_plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] @@ -336,6 +428,25 @@ posthog/test/test_health.py:0: error: "HttpResponse" has no attribute "json" [a posthog/test/test_health.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/test/test_health.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value] posthog/test/test_health.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value] +posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "name" [union-attr] +posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "name" [union-attr] +posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] +posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] +posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "delete" [union-attr] +posthog/test/activity_logging/test_activity_logging.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] +posthog/test/activity_logging/test_activity_logging.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] +posthog/test/activity_logging/test_activity_logging.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] +posthog/tasks/test/test_update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] +posthog/tasks/test/test_stop_surveys_reached_target.py:0: error: No overload variant of "__sub__" of "datetime" matches argument type "None" [operator] +posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Possible overload variants: +posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: def __sub__(self, datetime, /) -> timedelta +posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: def __sub__(self, timedelta, /) -> datetime +posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Right operand is of type "datetime | None" 
+posthog/tasks/test/test_stop_surveys_reached_target.py:0: error: No overload variant of "__sub__" of "datetime" matches argument type "None" [operator] +posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Possible overload variants: +posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: def __sub__(self, datetime, /) -> timedelta +posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: def __sub__(self, timedelta, /) -> datetime +posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Right operand is of type "datetime | None" posthog/tasks/exports/test/test_image_exporter.py:0: error: Function is missing a type annotation [no-untyped-def] posthog/tasks/exports/test/test_image_exporter.py:0: error: Function is missing a type annotation [no-untyped-def] posthog/tasks/exports/test/test_image_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] @@ -347,6 +458,10 @@ posthog/tasks/exports/test/test_export_utils.py:0: error: Function is missing a posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Function is missing a type annotation [no-untyped-def] +posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "memoryview" of "bytes | memoryview | None" has no attribute "decode" [union-attr] +posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "None" of "bytes | memoryview | None" has no attribute "decode" [union-attr] +posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "memoryview" of "bytes | memoryview | None" has no attribute "decode" [union-attr] +posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "None" of "bytes | memoryview | None" has no attribute "decode" [union-attr] posthog/hogql_queries/test/test_query_runner.py:0: error: Variable "TestQueryRunner" is not valid as a type [valid-type] posthog/hogql_queries/test/test_query_runner.py:0: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases posthog/hogql_queries/test/test_query_runner.py:0: error: Invalid base class "TestQueryRunner" [misc] @@ -415,25 +530,66 @@ posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attrib posthog/hogql/database/schema/event_sessions.py:0: error: Statement is unreachable [unreachable] posthog/hogql/ai.py:0: error: No overload variant of "__getitem__" of "tuple" matches argument type "str" [call-overload] posthog/hogql/ai.py:0: note: Possible overload variants: -posthog/hogql/ai.py:0: note: def __getitem__(self, SupportsIndex, /) -> Any -posthog/hogql/ai.py:0: note: def __getitem__(self, slice, /) -> tuple[Any, ...] +posthog/hogql/ai.py:0: note: def __getitem__(self, SupportsIndex, /) -> str | Any +posthog/hogql/ai.py:0: note: def __getitem__(self, slice, /) -> tuple[str | Any, ...] 
posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] +posthog/api/uploaded_media.py:0: error: Argument 1 to "read_bytes" has incompatible type "str | None"; expected "str" [arg-type] +posthog/api/uploaded_media.py:0: error: Argument 1 to "read_bytes" has incompatible type "str | None"; expected "str" [arg-type] posthog/api/test/test_utils.py:0: error: Incompatible types in assignment (expression has type "dict[str, str]", variable has type "QueryDict") [assignment] +posthog/api/test/test_survey.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "active" [union-attr] +posthog/api/test/test_stickiness.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_stickiness.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_stickiness.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_stickiness.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_stickiness.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_stickiness.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_stickiness.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_stickiness.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_stickiness.py:0: error: Module has no attribute "utc" [attr-defined] posthog/api/test/test_signup.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/test/test_signup.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/test/test_signup.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/test/test_signup.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/test/test_preflight.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/test/test_preflight.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] +posthog/api/test/test_personal_api_keys.py:0: error: Item "None" of "str | None" has no attribute "startswith" [union-attr] +posthog/api/test/test_personal_api_keys.py:0: error: Item "None" of "str | None" has no attribute "startswith" [union-attr] posthog/api/test/test_person.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] +posthog/api/test/test_organization_domain.py:0: error: Item "None" of "datetime | None" has no attribute "strftime" [union-attr] +posthog/api/signup.py:0: error: Argument 1 to 
"create_user" of "UserManager" has incompatible type "str | None"; expected "str" [arg-type] +posthog/api/organization_member.py:0: error: "User" has no attribute "totpdevice_set" [attr-defined] +posthog/api/organization_member.py:0: error: "User" has no attribute "social_auth" [attr-defined] +posthog/api/organization_member.py:0: error: Signature of "update" incompatible with supertype "ModelSerializer" [override] +posthog/api/organization_member.py:0: note: Superclass: +posthog/api/organization_member.py:0: note: def update(self, instance: Any, validated_data: Any) -> Any +posthog/api/organization_member.py:0: note: Subclass: +posthog/api/organization_member.py:0: note: def update(self, updated_membership: Any, validated_data: Any, **kwargs: Any) -> Any +posthog/api/organization_member.py:0: error: Signature of "update" incompatible with supertype "BaseSerializer" [override] +posthog/api/organization_member.py:0: note: Superclass: +posthog/api/organization_member.py:0: note: def update(self, instance: Any, validated_data: Any) -> Any +posthog/api/organization_member.py:0: note: Subclass: +posthog/api/organization_member.py:0: note: def update(self, updated_membership: Any, validated_data: Any, **kwargs: Any) -> Any +posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index] +posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index] +posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index] posthog/api/notebook.py:0: error: Incompatible types in assignment (expression has type "int", variable has type "str | None") [assignment] +posthog/api/exports.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] +posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "DataWarehouseCredential | Combinable | None") [assignment] +posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "str | int | Combinable") [assignment] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "dict[str, dict[str, str | bool]] | dict[str, str]", variable has type "dict[str, dict[str, str]]") [assignment] -posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Incompatible types in assignment (expression has type "list[str]", variable has type "dict[str, list[tuple[str, str]]]") [assignment] +posthog/warehouse/data_load/source_templates.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "Type") [assignment] +posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 1 has incompatible type "str"; expected "Type" [arg-type] +posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Incompatible types in assignment (expression has type "list[Any]", variable has type "dict[str, list[tuple[str, str]]]") [assignment] +posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload] +posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: Possible overload variants: 
+posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def get(self, Type, /) -> Sequence[str] | None +posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str] +posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 1 has incompatible type "dict[str, list[tuple[str, str]]]"; expected "list[Any]" [arg-type] posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc] posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type] @@ -459,10 +615,16 @@ posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] +posthog/tasks/exports/test/test_csv_exporter.py:0: error: Argument 1 to "BytesIO" has incompatible type "bytes | memoryview | None"; expected "Buffer" [arg-type] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] +posthog/tasks/exports/test/test_csv_exporter.py:0: error: Argument 1 to "read" has incompatible type "str | None"; expected "str" [arg-type] +posthog/tasks/exports/test/test_csv_exporter.py:0: error: Argument 1 to "read" has incompatible type "str | None"; expected "str" [arg-type] +posthog/tasks/exports/test/test_csv_exporter.py:0: error: Argument 1 to "read" has incompatible type "str | None"; expected "str" [arg-type] +posthog/tasks/exports/test/test_csv_exporter.py:0: error: Argument 1 to "read" has incompatible type "str | None"; expected "str" [arg-type] +posthog/tasks/exports/test/test_csv_exporter.py:0: error: Argument 1 to "read" has incompatible type "str | None"; expected "str" [arg-type] posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined] posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "_MonkeyPatchedResponse"; expected type "str" [index] posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined] @@ -486,14 +648,29 @@ posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" fo posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined] posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "_MonkeyPatchedResponse"; expected type "str" [index] posthog/models/test/test_organization_model.py:0: 
error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] +posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument 4 to "create_person_override" has incompatible type "int | None"; expected "int" [arg-type] posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument "group_type_index" to "raw_create_group_ch" has incompatible type "int"; expected "Literal[0, 1, 2, 3, 4]" [arg-type] posthog/management/commands/migrate_team.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "BatchExport") [assignment] +posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "exclude_events" [attr-defined] +posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "include_events" [attr-defined] +posthog/management/commands/migrate_action_webhooks.py:0: error: Argument 1 to "convert_link" has incompatible type "str | None"; expected "str" [arg-type] +posthog/management/commands/migrate_action_webhooks.py:0: error: Argument 2 to "replace" of "str" has incompatible type "str | None"; expected "str" [arg-type] +posthog/management/commands/fix_future_person_created_at.py:0: error: Argument "version" to "create_person" has incompatible type "int | None"; expected "int" [arg-type] posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index] posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index] +posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] posthog/hogql/test/test_query.py:0: error: Argument 1 to "pretty_print_in_tests" has incompatible type "str | None"; expected "str" [arg-type] posthog/hogql/test/test_query.py:0: error: Argument 1 to "pretty_print_in_tests" has incompatible type "str | None"; expected "str" [arg-type] posthog/hogql/test/test_query.py:0: error: Argument 1 to "pretty_print_in_tests" has incompatible type "str | None"; expected "str" [arg-type] posthog/hogql/test/test_query.py:0: error: Argument 1 to "pretty_print_in_tests" has incompatible type "str | None"; expected "str" [arg-type] +posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] posthog/hogql/test/test_query.py:0: error: Argument "placeholders" to "execute_hogql_query" has incompatible type "dict[str, Constant]"; expected "dict[str, Expr] | None" [arg-type] posthog/hogql/test/test_query.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/test/test_query.py:0: note: Consider using "Mapping" instead, which is covariant in the value type @@ -511,13 +688,21 @@ posthog/hogql/test/test_parse_string_python.py:0: error: Unsupported dynamic bas posthog/hogql/test/test_parse_string_cpp.py:0: error: 
Unsupported dynamic base class "parse_string_test_factory" [misc] posthog/hogql/database/test/test_view.py:0: error: Argument "dialect" to "print_ast" has incompatible type "str"; expected "Literal['hogql', 'clickhouse']" [arg-type] posthog/hogql/database/test/test_s3_table.py:0: error: Argument "dialect" to "print_ast" has incompatible type "str"; expected "Literal['hogql', 'clickhouse']" [arg-type] +posthog/async_migrations/test/test_runner.py:0: error: Item "None" of "datetime | None" has no attribute "day" [union-attr] +posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] +posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] +posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] +posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "name" [union-attr] +posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "description" [union-attr] +posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "filters" [union-attr] +posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] +posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "name" [union-attr] +posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "description" [union-attr] +posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "filters" [union-attr] +posthog/api/test/dashboards/test_dashboard.py:0: error: Value of type variable "_S" of "assertAlmostEqual" of "TestCase" cannot be "datetime | None" [type-var] posthog/api/test/dashboards/test_dashboard.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] posthog/api/test/dashboards/test_dashboard.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] posthog/api/test/dashboards/test_dashboard.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] -posthog/api/test/dashboards/test_dashboard.py:0: error: Argument 1 to "soft_delete" of "DashboardAPI" has incompatible type "int | _FieldDescriptor[AutoField[Combinable | int | str | None, int]] | Any"; expected "int" [arg-type] -posthog/api/test/dashboards/test_dashboard.py:0: error: Argument 1 to "get_dashboard" of "DashboardAPI" has incompatible type "int | _FieldDescriptor[AutoField[Combinable | int | str | None, int]] | Any"; expected "int" [arg-type] -posthog/api/test/dashboards/test_dashboard.py:0: error: Argument 1 to "update_dashboard" of "DashboardAPI" has incompatible type "int | _FieldDescriptor[AutoField[Combinable | int | str | None, int]] | Any"; expected "int" [arg-type] -posthog/api/test/dashboards/test_dashboard.py:0: error: Argument 1 to "get_dashboard" of "DashboardAPI" has incompatible type "int | 
_FieldDescriptor[AutoField[Combinable | int | str | None, int]] | Any"; expected "int" [arg-type] posthog/api/search.py:0: error: Argument "klass" to "class_queryset" has incompatible type "object"; expected "type[Model]" [arg-type] posthog/api/search.py:0: error: Argument "search_fields" to "class_queryset" has incompatible type "object"; expected "dict[str, str]" [arg-type] posthog/api/search.py:0: error: Argument "extra_fields" to "class_queryset" has incompatible type "object"; expected "dict[Any, Any] | None" [arg-type] @@ -528,7 +713,7 @@ posthog/api/property_definition.py:0: error: Item "AnonymousUser" of "User | Ano posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any | None" has no attribute "is_feature_available" [union-attr] posthog/api/property_definition.py:0: error: Item "ForeignObjectRel" of "Field[Any, Any] | ForeignObjectRel | GenericForeignKey" has no attribute "cached_col" [union-attr] posthog/api/property_definition.py:0: error: Item "GenericForeignKey" of "Field[Any, Any] | ForeignObjectRel | GenericForeignKey" has no attribute "cached_col" [union-attr] -posthog/api/property_definition.py:0: error: Incompatible types in assignment (expression has type "Manager[EnterprisePropertyDefinition]", variable has type "_QuerySet[PropertyDefinition, PropertyDefinition]") [assignment] +posthog/api/property_definition.py:0: error: Incompatible types in assignment (expression has type "Manager[EnterprisePropertyDefinition]", variable has type "QuerySet[PropertyDefinition, PropertyDefinition]") [assignment] posthog/api/property_definition.py:0: error: Item "BasePagination" of "BasePagination | None" has no attribute "get_limit" [union-attr] posthog/api/property_definition.py:0: error: Item "None" of "BasePagination | None" has no attribute "get_limit" [union-attr] posthog/api/property_definition.py:0: error: Item "BasePagination" of "BasePagination | None" has no attribute "get_offset" [union-attr] @@ -541,6 +726,39 @@ posthog/api/property_definition.py:0: error: Incompatible types in assignment (e posthog/api/property_definition.py:0: error: Item "AnonymousUser" of "User | AnonymousUser" has no attribute "organization" [union-attr] posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any | None" has no attribute "is_feature_available" [union-attr] posthog/api/event.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] +posthog/admin/inlines/plugin_attachment_inline.py:0: error: Signature of "has_add_permission" incompatible with supertype "BaseModelAdmin" [override] +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Superclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_add_permission(self, request: HttpRequest) -> bool +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Subclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_add_permission(self, request: Any, obj: Any) -> Any +posthog/admin/inlines/plugin_attachment_inline.py:0: error: Signature of "has_change_permission" incompatible with supertype "InlineModelAdmin" [override] +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Superclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_change_permission(self, request: HttpRequest, obj: Any | None = ...) 
-> bool +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Subclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_change_permission(self, request: Any, obj: Any) -> Any +posthog/admin/inlines/plugin_attachment_inline.py:0: error: Signature of "has_change_permission" incompatible with supertype "BaseModelAdmin" [override] +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Superclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_change_permission(self, request: HttpRequest, obj: Any | None = ...) -> bool +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Subclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_change_permission(self, request: Any, obj: Any) -> Any +posthog/admin/inlines/plugin_attachment_inline.py:0: error: Signature of "has_delete_permission" incompatible with supertype "InlineModelAdmin" [override] +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Superclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_delete_permission(self, request: HttpRequest, obj: Any | None = ...) -> bool +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Subclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_delete_permission(self, request: Any, obj: Any) -> Any +posthog/admin/inlines/plugin_attachment_inline.py:0: error: Signature of "has_delete_permission" incompatible with supertype "BaseModelAdmin" [override] +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Superclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_delete_permission(self, request: HttpRequest, obj: Any | None = ...) -> bool +posthog/admin/inlines/plugin_attachment_inline.py:0: note: Subclass: +posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_delete_permission(self, request: Any, obj: Any) -> Any +posthog/admin/admins/team_admin.py:0: error: Item "None" of "Project | None" has no attribute "pk" [union-attr] +posthog/admin/admins/team_admin.py:0: error: Item "None" of "Project | None" has no attribute "name" [union-attr] +posthog/admin/admins/plugin_admin.py:0: error: Item "None" of "Organization | None" has no attribute "pk" [union-attr] +posthog/admin/admins/plugin_admin.py:0: error: Item "None" of "Organization | None" has no attribute "name" [union-attr] +ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseTrendExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] +ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseFunnelExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] +ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseSecondaryExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] +ee/clickhouse/views/experiments.py:0: error: Item "None" of "User | None" has no attribute "email" [union-attr] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] @@ -562,14 +780,27 @@ posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "typ posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] 
posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/tasks/test/test_email.py:0: error: Argument 1 to "send_batch_export_run_failure" has incompatible type "UUID"; expected "str" [arg-type] +posthog/tasks/test/test_email.py:0: error: Argument 1 to "send_batch_export_run_failure" has incompatible type "UUID"; expected "str" [arg-type] +posthog/tasks/test/test_email.py:0: error: Argument 1 to "send_batch_export_run_failure" has incompatible type "UUID"; expected "str" [arg-type] posthog/session_recordings/session_recording_api.py:0: error: Argument "team_id" to "get_realtime_snapshots" has incompatible type "int"; expected "str" [arg-type] +posthog/session_recordings/session_recording_api.py:0: error: Value of type variable "SupportsRichComparisonT" of "sorted" cannot be "str | None" [type-var] +posthog/session_recordings/session_recording_api.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] -posthog/api/test/test_decide.py:0: error: Item "None" of "Any | None" has no attribute "toolbar_mode" [union-attr] -posthog/api/test/test_decide.py:0: error: Item "None" of "Any | None" has no attribute "save" [union-attr] -posthog/api/plugin.py:0: error: Item "None" of "IO[Any] | None" has no attribute "read" [union-attr] -posthog/api/plugin.py:0: error: Item "None" of "IO[Any] | None" has no attribute "read" [union-attr] +posthog/queries/app_metrics/historical_exports.py:0: error: Argument 1 to "loads" has incompatible type "str | None"; expected "str | bytes | bytearray" [arg-type] +posthog/api/test/test_decide.py:0: error: Item "None" of "User | None" has no attribute "toolbar_mode" [union-attr] +posthog/api/test/test_decide.py:0: error: Item "None" of "User | None" has no attribute "save" [union-attr] +posthog/api/test/test_authentication.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/admin/admins/plugin_config_admin.py:0: error: Item "None" of "Team | None" has no attribute "name" [union-attr] +posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value] +posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type] +posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload] +posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants: +posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None +posthog/warehouse/api/external_data_schema.py:0: note: def 
get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]] +posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T posthog/warehouse/api/external_data_source.py:0: error: Incompatible return value type (got "tuple[ExternalDataSource, dict[str, list[tuple[str, str]]]]", expected "tuple[ExternalDataSource, list[Any]]") [return-value] posthog/warehouse/api/external_data_source.py:0: error: Incompatible return value type (got "tuple[ExternalDataSource, dict[str, list[tuple[str, str]]]]", expected "tuple[ExternalDataSource, list[Any]]") [return-value] posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] @@ -579,29 +810,53 @@ posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: List item 0 has incompatible type "tuple[str, str, int, int, int, int, str, int]"; expected "tuple[str, str, int, int, str, str, str, str]" [list-item] posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error: "tuple[Any, ...]" has no attribute "last_uploaded_part_timestamp" [attr-defined] posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error: "tuple[Any, ...]" has no attribute "upload_state" [attr-defined] +posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "job_type" to "PipelineInputs" has incompatible type "str"; expected "Type" [arg-type] +posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to "sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type] +posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to "sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type] posthog/migrations/0237_remove_timezone_from_teams.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] posthog/migrations/0228_fix_tile_layouts.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] -posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] -posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] -posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] -posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment] posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment] posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "StripeSourcePayload") [assignment] posthog/warehouse/external_data_source/source.py:0: error: Argument 1 to "_create_source" has incompatible type "StripeSourcePayload"; expected "dict[Any, Any]" [arg-type] posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types 
in assignment (expression has type "str | int", variable has type "int") [assignment] +posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr] +posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] +posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID | Any"; expected "UUIDT | None" [arg-type] +posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] +posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] +posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID | Any"; expected "UUIDT | None" [arg-type] +posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] +posthog/api/plugin.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str | int | Combinable") [assignment] +posthog/api/plugin.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str | int | Combinable") [assignment] +posthog/api/plugin.py:0: error: Incompatible types in assignment (expression has type "int | None", variable has type "float | int | str | Combinable") [assignment] +posthog/api/plugin.py:0: error: Item "None" of "IO[Any] | None" has no attribute "read" [union-attr] +posthog/api/plugin.py:0: error: Incompatible type for "file_name" of "PluginAttachment" (got "str | None", expected "str | int | Combinable") [misc] +posthog/api/plugin.py:0: error: Incompatible type for "file_size" of "PluginAttachment" (got "int | None", expected "float | int | str | Combinable") [misc] +posthog/api/plugin.py:0: error: Item "None" of "IO[Any] | None" has no attribute "read" [union-attr] +posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] +posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID | Any"; expected "UUIDT | None" [arg-type] +posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] +posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] +posthog/api/plugin.py:0: error: Argument "organization_id" to "log_activity" has incompatible type "UUID"; expected "UUIDT | None" [arg-type] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/api/test/batch_exports/conftest.py:0: error: Signature of "run" incompatible with supertype "Worker" [override] +posthog/api/test/batch_exports/conftest.py:0: note: Superclass: 
+posthog/api/test/batch_exports/conftest.py:0: note: def run(self) -> Coroutine[Any, Any, None] +posthog/api/test/batch_exports/conftest.py:0: note: Subclass: +posthog/api/test/batch_exports/conftest.py:0: note: def run(self, loop: Any) -> Any posthog/api/test/batch_exports/conftest.py:0: error: Argument "activities" to "ThreadedWorker" has incompatible type "list[function]"; expected "Sequence[Callable[..., Any]]" [arg-type] +posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] +posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] -posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable] -posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item] posthog/test/test_middleware.py:0: error: Incompatible types in assignment (expression has type "_MonkeyPatchedWSGIResponse", variable has type "_MonkeyPatchedResponse") [assignment] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible return value type (got "dict[str, Collection[str]]", expected "dict[str, str]") [return-value] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "dict[str, str]") [assignment] @@ -637,9 +892,22 @@ posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: posthog/management/commands/test/test_create_batch_export_from_app.py:0: note: Possible overload variants: posthog/management/commands/test/test_create_batch_export_from_app.py:0: note: def __getitem__(self, SupportsIndex, /) -> str posthog/management/commands/test/test_create_batch_export_from_app.py:0: note: def __getitem__(self, slice, /) -> list[str] +posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable] +posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value] +posthog/api/test/test_capture.py:0: error: Module has no attribute "utc" 
[attr-defined] +posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] +posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] +posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] posthog/api/test/batch_exports/test_update.py:0: error: Unsupported target for indexed assignment ("Collection[str]") [index] posthog/api/test/batch_exports/test_update.py:0: error: Unsupported target for indexed assignment ("Collection[str]") [index] posthog/api/test/batch_exports/test_update.py:0: error: Dict entry 1 has incompatible type "str": "dict[str, Collection[str]]"; expected "str": "str" [dict-item] +posthog/api/test/batch_exports/test_update.py:0: error: Argument 3 to "get_batch_export_ok" has incompatible type "UUID"; expected "int" [arg-type] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] diff --git a/plugin-server/package.json b/plugin-server/package.json index 25df603b90e05..ea7467fb42993 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -27,7 +27,9 @@ "services:start": "cd .. && docker compose -f docker-compose.dev.yml up", "services:stop": "cd .. && docker compose -f docker-compose.dev.yml down", "services:clean": "cd .. 
&& docker compose -f docker-compose.dev.yml rm -v", - "services": "pnpm services:stop && pnpm services:clean && pnpm services:start" + "services": "pnpm services:stop && pnpm services:clean && pnpm services:start", + "build:cyclotron": "cd ../rust/cyclotron-node && pnpm run package", + "pnpm:devPreinstall": "pnpm run build:cyclotron" }, "graphile-worker": { "maxContiguousErrors": 300 @@ -50,7 +52,7 @@ "@google-cloud/storage": "^5.8.5", "@maxmind/geoip2-node": "^3.4.0", "@posthog/clickhouse": "^1.7.0", - "@posthog/hogvm": "^1.0.32", + "@posthog/hogvm": "^1.0.36", "@posthog/plugin-scaffold": "1.4.4", "@sentry/node": "^7.49.0", "@sentry/profiling-node": "^0.3.0", @@ -86,7 +88,8 @@ "uuid": "^9.0.1", "v8-profiler-next": "^1.9.0", "vm2": "3.9.18", - "detect-browser": "^5.3.0" + "detect-browser": "^5.3.0", + "@posthog/cyclotron": "file:../rust/cyclotron-node" }, "devDependencies": { "0x": "^5.5.0", diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml index c2c4bc52093ac..faedff7194010 100644 --- a/plugin-server/pnpm-lock.yaml +++ b/plugin-server/pnpm-lock.yaml @@ -43,9 +43,12 @@ dependencies: '@posthog/clickhouse': specifier: ^1.7.0 version: 1.7.0 + '@posthog/cyclotron': + specifier: file:../rust/cyclotron-node + version: file:../rust/cyclotron-node '@posthog/hogvm': - specifier: ^1.0.32 - version: 1.0.32(luxon@3.4.4)(re2@1.20.3) + specifier: ^1.0.36 + version: 1.0.36(luxon@3.4.4)(re2@1.20.3) '@posthog/plugin-scaffold': specifier: 1.4.4 version: 1.4.4 @@ -3113,8 +3116,8 @@ packages: engines: {node: '>=12'} dev: false - /@posthog/hogvm@1.0.32(luxon@3.4.4)(re2@1.20.3): - resolution: {integrity: sha512-OjgSzs4fZ1Q0KEiON34/dH9TybfVfallANcgoRiNhUd9KstSm55Ds5cpK6HjGMfRRPpPULuDQ77RH3DBjJ2CCA==} + /@posthog/hogvm@1.0.36(luxon@3.4.4)(re2@1.20.3): + resolution: {integrity: sha512-O4mVlTCNYAOg3eh82r5YE0HqMf4b0em7JNHCD6tkUUMxe47rTAVflSW/vVOkV7ogfgyjxQD0bOS6w708FW/cMg==} peerDependencies: luxon: ^3.4.4 re2: ^1.21.3 @@ -10731,3 +10734,9 @@ packages: /yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + + file:../rust/cyclotron-node: + resolution: {directory: ../rust/cyclotron-node, type: directory} + name: '@posthog/cyclotron' + version: 0.1.0 + dev: false diff --git a/plugin-server/src/capabilities.ts b/plugin-server/src/capabilities.ts index 11158a284b951..7b8c8461b78ad 100644 --- a/plugin-server/src/capabilities.ts +++ b/plugin-server/src/capabilities.ts @@ -26,6 +26,7 @@ export function getPluginServerCapabilities(config: PluginsServerConfig): Plugin cdpProcessedEvents: true, cdpFunctionCallbacks: true, cdpFunctionOverflow: true, + cdpCyclotronWorker: true, syncInlinePlugins: true, ...sharedCapabilities, } @@ -108,6 +109,11 @@ export function getPluginServerCapabilities(config: PluginsServerConfig): Plugin cdpFunctionOverflow: true, ...sharedCapabilities, } + case PluginServerMode.cdp_cyclotron_worker: + return { + cdpCyclotronWorker: true, + ...sharedCapabilities, + } // This is only for functional tests, which time out if all capabilities are used // ideally we'd run just the specific capability needed per test, but that's not easy to do atm case PluginServerMode.functional_tests: diff --git a/plugin-server/src/cdp/async-function-executor.ts b/plugin-server/src/cdp/async-function-executor.ts index 93dd7e285cb78..fe6df753dc723 100644 --- a/plugin-server/src/cdp/async-function-executor.ts +++ b/plugin-server/src/cdp/async-function-executor.ts @@ -1,3 +1,4 @@ +import cyclotron 
from '@posthog/cyclotron' import { Histogram } from 'prom-client' import { buildIntegerMatcher } from '../config/config' @@ -27,9 +28,11 @@ export type AsyncFunctionExecutorOptions = { export class AsyncFunctionExecutor { hogHookEnabledForTeams: ValueMatcher<number> + cyclotronEnabledForTeams: ValueMatcher<number> constructor(private serverConfig: PluginsServerConfig, private rustyHook: RustyHook) { this.hogHookEnabledForTeams = buildIntegerMatcher(serverConfig.CDP_ASYNC_FUNCTIONS_RUSTY_HOOK_TEAMS, true) + this.cyclotronEnabledForTeams = buildIntegerMatcher(serverConfig.CDP_ASYNC_FUNCTIONS_CYCLOTRON_TEAMS, true) } async execute( @@ -99,8 +102,39 @@ export class AsyncFunctionExecutor { histogramFetchPayloadSize.observe(body.length / 1024) } - // If the caller hasn't forced it to be synchronous and the team has the rustyhook enabled, enqueue it - if (!options?.sync && this.hogHookEnabledForTeams(request.teamId)) { + // If the caller hasn't forced it to be synchronous and the team has the cyclotron or + // rustyhook enabled, enqueue it in one of those services. + if (!options?.sync && this.cyclotronEnabledForTeams(request.teamId)) { + try { + await cyclotron.createJob({ + teamId: request.teamId, + functionId: request.hogFunctionId, + queueName: 'fetch', + // TODO: The async function compression changes happen upstream of this + // function. I guess we'll want to unwind that change because we actually + // want the `vmState` (and the rest of state) so we can put it into PG here. + vmState: '', + parameters: JSON.stringify({ + return_queue: 'hog', + url, + method, + headers, + // The body is passed in the `blob` field below. + }), + metadata: JSON.stringify({}), + // Fetch bodies are passed in the binary blob column/field. + blob: toUint8Array(body), + }) + } catch (e) { + status.error( + '🦔', + `[HogExecutor] Cyclotron failed to enqueue async fetch function, sending directly instead`, + { + error: e, + } + ) + } + } else if (!options?.sync && this.hogHookEnabledForTeams(request.teamId)) { const hoghooksPayload = JSON.stringify(request) histogramHogHooksPayloadSize.observe(hoghooksPayload.length / 1024) @@ -154,3 +188,23 @@ export class AsyncFunctionExecutor { return response } } + +function toUint8Array(data: any): Uint8Array | undefined { + if (data === null || data === undefined) { + return undefined + } + + if (data instanceof Uint8Array) { + return data + } + + if (data instanceof ArrayBuffer) { + return new Uint8Array(data) + } + + if (typeof data === 'string') { + return new TextEncoder().encode(data) + } + + return new TextEncoder().encode(JSON.stringify(data)) +} diff --git a/plugin-server/src/cdp/cdp-api.ts b/plugin-server/src/cdp/cdp-api.ts index 553e380e16cdf..943091af13814 100644 --- a/plugin-server/src/cdp/cdp-api.ts +++ b/plugin-server/src/cdp/cdp-api.ts @@ -1,4 +1,3 @@ -import { convertJSToHog } from '@posthog/hogvm' import express from 'express' import { DateTime } from 'luxon' @@ -147,7 +146,7 @@ export class CdpApi { }) // Add the state, simulating what executeAsyncResponse would do - invocation.vmState!.stack.push(convertJSToHog({ status: 200, body: {} })) + invocation.vmState!.stack.push({ status: 200, body: {} }) } else { const asyncInvocationRequest: HogFunctionInvocationAsyncRequest = { state: '', // WE don't care about the state for this level of testing @@ -166,7 +165,7 @@ message: 'Failed to execute async function', }) } - invocation.vmState!.stack.push(convertJSToHog(asyncRes?.asyncFunctionResponse.response ?? 
null)) + invocation.vmState!.stack.push(asyncRes?.asyncFunctionResponse.response ?? null) } logs.push(...response.logs) diff --git a/plugin-server/src/cdp/cdp-consumers.ts b/plugin-server/src/cdp/cdp-consumers.ts index 89a5b3a3a597c..fef401d472927 100644 --- a/plugin-server/src/cdp/cdp-consumers.ts +++ b/plugin-server/src/cdp/cdp-consumers.ts @@ -1,3 +1,4 @@ +import cyclotron from '@posthog/cyclotron' import { captureException } from '@sentry/node' import { features, librdkafkaVersion, Message } from 'node-rdkafka' import { Counter, Histogram } from 'prom-client' @@ -443,7 +444,12 @@ abstract class CdpConsumerBase { const globalConnectionConfig = createRdConnectionConfigFromEnvVars(this.hub) const globalProducerConfig = createRdProducerConfigFromEnvVars(this.hub) - await Promise.all([this.hogFunctionManager.start()]) + await Promise.all([ + this.hogFunctionManager.start(), + this.hub.CYCLOTRON_DATABASE_URL + ? cyclotron.initManager({ shards: [{ dbUrl: this.hub.CYCLOTRON_DATABASE_URL }] }) + : Promise.resolve(), + ]) this.kafkaProducer = new KafkaProducerWrapper( await createKafkaProducer(globalConnectionConfig, globalProducerConfig) @@ -693,3 +699,57 @@ export class CdpOverflowConsumer extends CdpConsumerBase { return invocationGlobals } } + +// TODO: Split out non-Kafka specific parts of CdpConsumerBase so that it can be used by the +// Cyclotron worker below. Or maybe we can just wait, and rip the Kafka bits out once Cyclotron is +// shipped (and rename it something other than consumer, probably). For now, this is an easy way to +// use existing code and get an end-to-end demo shipped. +export class CdpCyclotronWorker extends CdpConsumerBase { + protected name = 'CdpCyclotronWorker' + protected topic = 'UNUSED-CdpCyclotronWorker' + protected consumerGroupId = 'UNUSED-CdpCyclotronWorker' + private runningWorker: Promise<void> | undefined + private isUnhealthy = false + + public async _handleEachBatch(_: Message[]): Promise<void> { + // Not called, we override `start` below to use Cyclotron instead. + } + + private async innerStart() { + try { + const limit = 100 // TODO: Make configurable. + while (!this.isStopping) { + const jobs = await cyclotron.dequeueJobsWithVmState('hog', limit) + for (const job of jobs) { + // TODO: Reassemble a HogFunctionInvocationAsyncResponse (or whatever proper type) + // from the fields on the job, and then execute the next Hog step. + console.log(job.id) + } + } + } catch (err) { + this.isUnhealthy = true + console.error('Error in Cyclotron worker', err) + throw err + } + } + + public async start() { + await cyclotron.initManager({ shards: [{ dbUrl: this.hub.CYCLOTRON_DATABASE_URL }] }) + await cyclotron.initWorker({ dbUrl: this.hub.CYCLOTRON_DATABASE_URL }) + + // Consumer `start` expects an async task to be started, and not that `start` itself blocks + // indefinitely.
+ this.runningWorker = this.innerStart() + + return Promise.resolve() + } + + public async stop() { + await super.stop() + await this.runningWorker + } + + public isHealthy() { + return !this.isUnhealthy + } +} diff --git a/plugin-server/src/cdp/hog-executor.ts b/plugin-server/src/cdp/hog-executor.ts index 6b590994000c4..3cd4ca074294d 100644 --- a/plugin-server/src/cdp/hog-executor.ts +++ b/plugin-server/src/cdp/hog-executor.ts @@ -1,4 +1,4 @@ -import { calculateCost, convertHogToJS, convertJSToHog, exec, ExecResult } from '@posthog/hogvm' +import { calculateCost, convertHogToJS, exec, ExecResult } from '@posthog/hogvm' import { DateTime } from 'luxon' import { Histogram } from 'prom-client' @@ -30,9 +30,9 @@ const hogExecutionDuration = new Histogram({ export const formatInput = (bytecode: any, globals: HogFunctionInvocation['globals']): any => { // Similar to how we generate the bytecode by iterating over the values, // here we iterate over the object and replace the bytecode with the actual values - // bytecode is indicated as an array beginning with ["_h"] + // bytecode is indicated as an array beginning with ["_H"] (versions 1+) or ["_h"] (version 0) - if (Array.isArray(bytecode) && bytecode[0] === '_h') { + if (Array.isArray(bytecode) && (bytecode[0] === '_h' || bytecode[0] === '_H')) { const res = exec(bytecode, { globals, timeout: DEFAULT_TIMEOUT_MS, @@ -217,7 +217,7 @@ export class HogExecutor { } // Add the response to the stack to continue execution - invocation.vmState.stack.push(convertJSToHog(response)) + invocation.vmState.stack.push(response) invocation.timings.push(...timings) const res = this.execute(hogFunction, invocation) diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts index c3fea809c614f..7de2856530e14 100644 --- a/plugin-server/src/config/config.ts +++ b/plugin-server/src/config/config.ts @@ -187,9 +187,13 @@ export function getDefaultConfig(): PluginsServerConfig { CDP_WATCHER_REFILL_RATE: 10, CDP_WATCHER_DISABLED_TEMPORARY_MAX_COUNT: 3, CDP_ASYNC_FUNCTIONS_RUSTY_HOOK_TEAMS: '', + CDP_ASYNC_FUNCTIONS_CYCLOTRON_TEAMS: '', CDP_REDIS_PASSWORD: '', CDP_REDIS_HOST: '', CDP_REDIS_PORT: 6479, + + // Cyclotron + CYCLOTRON_DATABASE_URL: '', } } diff --git a/plugin-server/src/main/pluginsServer.ts b/plugin-server/src/main/pluginsServer.ts index 0bcbf0e63597f..3a7e8851774a4 100644 --- a/plugin-server/src/main/pluginsServer.ts +++ b/plugin-server/src/main/pluginsServer.ts @@ -11,7 +11,12 @@ import v8Profiler from 'v8-profiler-next' import { getPluginServerCapabilities } from '../capabilities' import { CdpApi } from '../cdp/cdp-api' -import { CdpFunctionCallbackConsumer, CdpOverflowConsumer, CdpProcessedEventsConsumer } from '../cdp/cdp-consumers' +import { + CdpCyclotronWorker, + CdpFunctionCallbackConsumer, + CdpOverflowConsumer, + CdpProcessedEventsConsumer, +} from '../cdp/cdp-consumers' import { defaultConfig, sessionRecordingConsumerConfig } from '../config/config' import { Hub, PluginServerCapabilities, PluginsServerConfig } from '../types' import { createHub, createKafkaClient, createKafkaProducerWrapper } from '../utils/db/hub' @@ -571,6 +576,17 @@ export async function startPluginsServer( healthChecks['cdp-overflow'] = () => consumer.isHealthy() ?? false } + if (capabilities.cdpCyclotronWorker) { + ;[hub, closeHub] = hub ? 
[hub, closeHub] : await createHub(serverConfig, capabilities) + if (hub.CYCLOTRON_DATABASE_URL) { + const worker = new CdpCyclotronWorker(hub) + await worker.start() + } else { + // This is a temporary solution until we *require* Cyclotron to be configured. + status.warn('💥', 'CYCLOTRON_DATABASE_URL is not set, not running Cyclotron worker') + } + } + if (capabilities.http) { const app = setupCommonRoutes(healthChecks, analyticsEventsIngestionConsumer) diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts index 953d45a56bcf6..1d596f034d81e 100644 --- a/plugin-server/src/types.ts +++ b/plugin-server/src/types.ts @@ -85,6 +85,7 @@ export enum PluginServerMode { cdp_processed_events = 'cdp-processed-events', cdp_function_callbacks = 'cdp-function-callbacks', cdp_function_overflow = 'cdp-function-overflow', + cdp_cyclotron_worker = 'cdp-cyclotron-worker', functional_tests = 'functional-tests', } @@ -107,6 +108,7 @@ export type CdpConfig = { CDP_WATCHER_DISABLED_TEMPORARY_TTL: number // How long a function should be temporarily disabled for CDP_WATCHER_DISABLED_TEMPORARY_MAX_COUNT: number // How many times a function can be disabled before it is disabled permanently CDP_ASYNC_FUNCTIONS_RUSTY_HOOK_TEAMS: string + CDP_ASYNC_FUNCTIONS_CYCLOTRON_TEAMS: string CDP_REDIS_HOST: string CDP_REDIS_PORT: number CDP_REDIS_PASSWORD: string @@ -279,6 +281,8 @@ export interface PluginsServerConfig extends CdpConfig { // kafka debug stats interval SESSION_RECORDING_KAFKA_CONSUMPTION_STATISTICS_EVENT_INTERVAL_MS: number + + CYCLOTRON_DATABASE_URL: string } export interface Hub extends PluginsServerConfig { @@ -345,6 +349,7 @@ export interface PluginServerCapabilities { cdpProcessedEvents?: boolean cdpFunctionCallbacks?: boolean cdpFunctionOverflow?: boolean + cdpCyclotronWorker?: boolean appManagementSingleton?: boolean preflightSchedules?: boolean // Used for instance health checks on hobby deploy, not useful on cloud http?: boolean diff --git a/plugin-server/tests/cdp/cdp-function-callbacks-consumer.test.ts b/plugin-server/tests/cdp/cdp-function-callbacks-consumer.test.ts index 54f052fe439d4..33559d108ace9 100644 --- a/plugin-server/tests/cdp/cdp-function-callbacks-consumer.test.ts +++ b/plugin-server/tests/cdp/cdp-function-callbacks-consumer.test.ts @@ -111,6 +111,14 @@ describe('CDP Processed Events Consuner', () => { $lib_version: '1.0.0', }, }, + groups: {}, + person: { + uuid: 'b3a1fe86-b10c-43cc-acaf-d208977608d0', + distinct_ids: ['b3a1fe86-b10c-43cc-acaf-d208977608d0'], + properties: { + email: 'test@posthog.com', + }, + }, } beforeEach(async () => { @@ -161,7 +169,7 @@ describe('CDP Processed Events Consuner', () => { }, { level: 'debug', - message: "Suspending function due to async function call 'fetch'. Payload: 1140 bytes", + message: "Suspending function due to async function call 'fetch'. Payload: 1331 bytes", }, { level: 'info', @@ -209,7 +217,7 @@ describe('CDP Processed Events Consuner', () => { }, { level: 'debug', - message: "Suspending function due to async function call 'fetch'. Payload: 1140 bytes", + message: "Suspending function due to async function call 'fetch'. 
Payload: 1331 bytes", }, { level: 'debug', diff --git a/plugin-server/tests/cdp/fixtures.ts b/plugin-server/tests/cdp/fixtures.ts index b70af8efe3c39..8f3fed7da62dc 100644 --- a/plugin-server/tests/cdp/fixtures.ts +++ b/plugin-server/tests/cdp/fixtures.ts @@ -100,6 +100,15 @@ export const createHogExecutionGlobals = ( ): HogFunctionInvocationGlobals => { return { ...data, + person: { + uuid: 'uuid', + name: 'test', + url: 'http://localhost:8000/persons/1', + properties: { + $lib_version: '1.2.3', + }, + ...(data.person ?? {}), + }, project: { id: 1, name: 'test', diff --git a/plugin-server/tests/cdp/hog-executor.test.ts b/plugin-server/tests/cdp/hog-executor.test.ts index ae5203f6415b0..e8ac3d365cd9a 100644 --- a/plugin-server/tests/cdp/hog-executor.test.ts +++ b/plugin-server/tests/cdp/hog-executor.test.ts @@ -58,9 +58,9 @@ describe('Hog Executor', () => { }) it('can execute messages', () => { - const globals = createHogExecutionGlobals() + const globals = createHogExecutionGlobals({ groups: {} }) const results = executor - .findMatchingFunctions(createHogExecutionGlobals()) + .findMatchingFunctions(createHogExecutionGlobals({ groups: {} })) .matchingFunctions.map((x) => executor.executeFunction(globals, x) as HogFunctionInvocationResult) expect(results).toHaveLength(1) expect(results[0]).toMatchObject({ @@ -74,9 +74,9 @@ describe('Hog Executor', () => { }) it('collects logs from the function', () => { - const globals = createHogExecutionGlobals() + const globals = createHogExecutionGlobals({ groups: {} }) const results = executor - .findMatchingFunctions(createHogExecutionGlobals()) + .findMatchingFunctions(createHogExecutionGlobals({ groups: {} })) .matchingFunctions.map((x) => executor.executeFunction(globals, x) as HogFunctionInvocationResult) expect(results[0].logs).toMatchObject([ { @@ -87,7 +87,7 @@ describe('Hog Executor', () => { { timestamp: expect.any(DateTime), level: 'debug', - message: "Suspending function due to async function call 'fetch'. Payload: 1299 bytes", + message: "Suspending function due to async function call 'fetch'. 
Payload: 1456 bytes", }, ]) }) @@ -100,7 +100,10 @@ describe('Hog Executor', () => { mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) - const result = executor.executeFunction(createHogExecutionGlobals(), fn) as HogFunctionInvocationResult + const result = executor.executeFunction( + createHogExecutionGlobals({ groups: {} }), + fn + ) as HogFunctionInvocationResult expect(result.logs.map((x) => x.message)).toMatchInlineSnapshot(` Array [ "Executing function", @@ -115,9 +118,9 @@ describe('Hog Executor', () => { }) it('queues up an async function call', () => { - const globals = createHogExecutionGlobals() + const globals = createHogExecutionGlobals({ groups: {} }) const results = executor - .findMatchingFunctions(createHogExecutionGlobals()) + .findMatchingFunctions(createHogExecutionGlobals({ groups: {} })) .matchingFunctions.map((x) => executor.executeFunction(globals, x) as HogFunctionInvocationResult) expect(results[0]).toMatchObject({ invocation: { @@ -163,9 +166,14 @@ describe('Hog Executor', () => { properties: { $lib_version: '1.2.3' }, timestamp: '2024-06-07T12:00:00.000Z', }, - groups: null, + groups: {}, nested: { foo: 'http://localhost:8000/events/1' }, - person: null, + person: { + uuid: 'uuid', + name: 'test', + url: 'http://localhost:8000/persons/1', + properties: { $lib_version: '1.2.3' }, + }, event_url: 'http://localhost:8000/events/1-test', }, method: 'POST', @@ -177,9 +185,9 @@ describe('Hog Executor', () => { it('executes the full function in a loop', () => { const logs: LogEntry[] = [] - const globals = createHogExecutionGlobals() + const globals = createHogExecutionGlobals({ groups: {} }) const results = executor - .findMatchingFunctions(createHogExecutionGlobals()) + .findMatchingFunctions(createHogExecutionGlobals({ groups: {} })) .matchingFunctions.map((x) => executor.executeFunction(globals, x) as HogFunctionInvocationResult) const splicedLogs = results[0].logs.splice(0, 100) logs.push(...splicedLogs) @@ -191,18 +199,18 @@ describe('Hog Executor', () => { expect(asyncExecResult.finished).toBe(true) expect(logs.map((log) => log.message)).toEqual([ 'Executing function', - "Suspending function due to async function call 'fetch'. Payload: 1299 bytes", + "Suspending function due to async function call 'fetch'. Payload: 1456 bytes", 'Resuming function', 'Fetch response:, {"status":200,"body":"success"}', - 'Function completed in 100ms. Sync: 0ms. Mem: 589 bytes. Ops: 22.', + 'Function completed in 100ms. Sync: 0ms. Mem: 746 bytes. Ops: 22.', ]) }) it('parses the responses body if a string', () => { const logs: LogEntry[] = [] - const globals = createHogExecutionGlobals() + const globals = createHogExecutionGlobals({ groups: {} }) const results = executor - .findMatchingFunctions(createHogExecutionGlobals()) + .findMatchingFunctions(createHogExecutionGlobals({ groups: {} })) .matchingFunctions.map((x) => executor.executeFunction(globals, x) as HogFunctionInvocationResult) const splicedLogs = results[0].logs.splice(0, 100) logs.push(...splicedLogs) @@ -219,10 +227,10 @@ describe('Hog Executor', () => { expect(asyncExecResult.finished).toBe(true) expect(logs.map((log) => log.message)).toEqual([ 'Executing function', - "Suspending function due to async function call 'fetch'. Payload: 1299 bytes", + "Suspending function due to async function call 'fetch'. Payload: 1456 bytes", 'Resuming function', 'Fetch response:, {"status":200,"body":{"foo":"bar"}}', // The body is parsed - 'Function completed in 100ms. Sync: 0ms. Mem: 589 bytes. 
Ops: 22.', + 'Function completed in 100ms. Sync: 0ms. Mem: 746 bytes. Ops: 22.', ]) }) }) @@ -237,12 +245,13 @@ describe('Hog Executor', () => { mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) - const resultsShouldntMatch = executor.findMatchingFunctions(createHogExecutionGlobals()) + const resultsShouldntMatch = executor.findMatchingFunctions(createHogExecutionGlobals({ groups: {} })) expect(resultsShouldntMatch.matchingFunctions).toHaveLength(0) expect(resultsShouldntMatch.nonMatchingFunctions).toHaveLength(1) const resultsShouldMatch = executor.findMatchingFunctions( createHogExecutionGlobals({ + groups: {}, event: { name: '$pageview', properties: { @@ -267,9 +276,9 @@ describe('Hog Executor', () => { mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) // Simulate the recusive loop - const globals = createHogExecutionGlobals() + const globals = createHogExecutionGlobals({ groups: {} }) const results = executor - .findMatchingFunctions(createHogExecutionGlobals()) + .findMatchingFunctions(createHogExecutionGlobals({ groups: {} })) .matchingFunctions.map((x) => executor.executeFunction(globals, x) as HogFunctionInvocationResult) expect(results).toHaveLength(1) @@ -305,9 +314,9 @@ describe('Hog Executor', () => { mockFunctionManager.getTeamHogFunctions.mockReturnValue([fn]) - const globals = createHogExecutionGlobals() + const globals = createHogExecutionGlobals({ groups: {} }) const results = executor - .findMatchingFunctions(createHogExecutionGlobals()) + .findMatchingFunctions(createHogExecutionGlobals({ groups: {} })) .matchingFunctions.map((x) => executor.executeFunction(globals, x) as HogFunctionInvocationResult) expect(results).toHaveLength(1) expect(results[0].error).toContain('Execution timed out after 0.1 seconds. Performed ') @@ -339,7 +348,7 @@ describe('Hog Executor', () => { ...HOG_FILTERS_EXAMPLES.no_filters, }) - const globals = createHogExecutionGlobals() + const globals = createHogExecutionGlobals({ groups: {} }) const result = executor.executeFunction(globals, fn) expect(result?.capturedPostHogEvents).toEqual([ { @@ -362,6 +371,7 @@ describe('Hog Executor', () => { }) const globals = createHogExecutionGlobals({ + groups: {}, event: { properties: { $hog_function_execution_count: 1, diff --git a/plugin-server/tests/server.test.ts b/plugin-server/tests/server.test.ts index 52fe0b989bf40..009416547b36d 100644 --- a/plugin-server/tests/server.test.ts +++ b/plugin-server/tests/server.test.ts @@ -97,6 +97,7 @@ describe('server', () => { cdpProcessedEvents: true, cdpFunctionCallbacks: true, cdpFunctionOverflow: true, + cdpCyclotronWorker: true, syncInlinePlugins: true, } ) diff --git a/posthog/api/__init__.py b/posthog/api/__init__.py index 1e6af969cbafd..9d61824e95d13 100644 --- a/posthog/api/__init__.py +++ b/posthog/api/__init__.py @@ -383,6 +383,20 @@ def api_not_found(request): ["team_id", "insight_id"], ) +project_insights_router.register( + "thresholds", + alert.ThresholdViewSet, + "project_insight_thresholds", + ["team_id", "insight_id"], +) + +project_insights_router.register( + "alerts", + alert.AlertViewSet, + "project_insight_alerts", + ["team_id", "insight_id"], +) + project_session_recordings_router.register( r"sharing", sharing.SharingConfigurationViewSet, diff --git a/posthog/api/activity_log.py b/posthog/api/activity_log.py index 9b7e71614ebda..6f85480260722 100644 --- a/posthog/api/activity_log.py +++ b/posthog/api/activity_log.py @@ -4,7 +4,7 @@ from django.db.models import Q, QuerySet from rest_framework import serializers, status, 
viewsets, pagination, mixins -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/api/alert.py b/posthog/api/alert.py index 8560b0304e198..0a655a8bd3d9d 100644 --- a/posthog/api/alert.py +++ b/posthog/api/alert.py @@ -1,37 +1,208 @@ from rest_framework import serializers, viewsets from rest_framework.exceptions import ValidationError +from rest_framework.response import Response from django.db.models import QuerySet from posthog.api.routing import TeamAndOrgViewSetMixin -from posthog.models.alert import Alert +from posthog.api.shared import UserBasicSerializer +from posthog.models import User +from posthog.models.alert import ( + AlertConfiguration, + AlertCheck, + Threshold, + AlertSubscription, + are_alerts_supported_for_insight, +) + + +class ThresholdSerializer(serializers.ModelSerializer): + class Meta: + model = Threshold + fields = [ + "id", + "created_at", + "name", + "configuration", + ] + read_only_fields = [ + "id", + "created_at", + ] + + def validate(self, data): + instance = Threshold(**data) + instance.clean() + return data + + +class AlertCheckSerializer(serializers.ModelSerializer): + targets_notified = serializers.SerializerMethodField() + + class Meta: + model = AlertCheck + fields = [ + "id", + "created_at", + "calculated_value", + "state", + "targets_notified", + ] + read_only_fields = fields + + def get_targets_notified(self, instance: AlertCheck) -> bool: + return instance.targets_notified != {} + + +class AlertSubscriptionSerializer(serializers.ModelSerializer): + user = serializers.PrimaryKeyRelatedField(queryset=User.objects.filter(is_active=True), required=True) + + class Meta: + model = AlertSubscription + fields = ["id", "user", "alert_configuration"] + read_only_fields = ["id", "alert_configuration"] + + def validate(self, data): + user: User = data["user"] + alert_configuration = data["alert_configuration"] + + if not user.teams.filter(pk=alert_configuration.team_id).exists(): + raise serializers.ValidationError("User does not belong to the same organization as the alert's team.") + + return data class AlertSerializer(serializers.ModelSerializer): + created_by = UserBasicSerializer(read_only=True) + checks = AlertCheckSerializer(many=True, read_only=True) + threshold = ThresholdSerializer() + subscribed_users = serializers.PrimaryKeyRelatedField( + queryset=User.objects.filter(is_active=True), + many=True, + required=True, + write_only=True, + allow_empty=False, + ) + class Meta: - model = Alert + model = AlertConfiguration fields = [ "id", + "created_by", + "created_at", "insight", "name", - "target_value", - "anomaly_condition", + "subscribed_users", + "threshold", + "condition", + "state", + "enabled", + "last_notified_at", + "checks", + ] + read_only_fields = [ + "id", + "created_at", + "state", + "last_notified_at", ] - read_only_fields = ["id"] - def create(self, validated_data: dict) -> Alert: + def to_representation(self, instance): + data = super().to_representation(instance) + data["subscribed_users"] = UserBasicSerializer(instance.subscribed_users.all(), many=True, read_only=True).data + return data + + def add_threshold(self, threshold_data, validated_data): + threshold_instance = Threshold.objects.create( + **threshold_data, + team_id=self.context["team_id"], + created_by=self.context["request"].user, + insight_id=validated_data["insight"].id, + ) + return 
threshold_instance + + def create(self, validated_data: dict) -> AlertConfiguration: validated_data["team_id"] = self.context["team_id"] - instance: Alert = super().create(validated_data) + validated_data["created_by"] = self.context["request"].user + subscribed_users = validated_data.pop("subscribed_users") + threshold_data = validated_data.pop("threshold", None) + + if threshold_data: + threshold_instance = self.add_threshold(threshold_data, validated_data) + validated_data["threshold"] = threshold_instance + + instance: AlertConfiguration = super().create(validated_data) + + for user in subscribed_users: + AlertSubscription.objects.create( + user=user, alert_configuration=instance, created_by=self.context["request"].user + ) + return instance + def update(self, instance, validated_data): + conditions_or_threshold_changed = False + + threshold_data = validated_data.pop("threshold", None) + if threshold_data is not None: + if threshold_data == {}: + instance.threshold = None + conditions_or_threshold_changed = True + elif instance.threshold: + previous_threshold_configuration = instance.threshold.configuration + threshold_instance = instance.threshold + for key, value in threshold_data.items(): + setattr(threshold_instance, key, value) + threshold_instance.save() + if previous_threshold_configuration != threshold_instance.configuration: + conditions_or_threshold_changed = True + else: + threshold_instance = self.add_threshold(threshold_data, validated_data) + validated_data["threshold"] = threshold_instance + conditions_or_threshold_changed = True + + subscribed_users = validated_data.pop("subscribed_users", None) + if subscribed_users is not None: + AlertSubscription.objects.filter(alert_configuration=instance).exclude(user__in=subscribed_users).delete() + for user in subscribed_users: + AlertSubscription.objects.get_or_create( + user=user, alert_configuration=instance, defaults={"created_by": self.context["request"].user} + ) + + if conditions_or_threshold_changed: + # If anything changed we set inactive, so it's firing and notifying with the new settings + instance.state = "inactive" + + return super().update(instance, validated_data) + + def validate_insight(self, value): + if value and not are_alerts_supported_for_insight(value): + raise ValidationError("Alerts are not supported for this insight.") + return value + + def validate_subscribed_users(self, value): + for user in value: + if not user.teams.filter(pk=self.context["team_id"]).exists(): + raise ValidationError("User does not belong to the same organization as the alert's team.") + return value + def validate(self, attrs): if attrs.get("insight") and attrs["insight"].team.id != self.context["team_id"]: raise ValidationError({"insight": ["This insight does not belong to your team."]}) + + if attrs.get("enabled") is not False and ( + AlertConfiguration.objects.filter(team_id=self.context["team_id"], enabled=True).count() + >= AlertConfiguration.ALERTS_PER_TEAM + ): + raise ValidationError( + {"alert": [f"Your team has reached the limit of {AlertConfiguration.ALERTS_PER_TEAM} enabled alerts."]} + ) + return attrs class AlertViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): scope_object = "INTERNAL" - queryset = Alert.objects.all() + queryset = AlertConfiguration.objects.all().order_by("-created_at") serializer_class = AlertSerializer def safely_get_queryset(self, queryset) -> QuerySet: @@ -39,3 +210,23 @@ def safely_get_queryset(self, queryset) -> QuerySet: if "insight" in filters: queryset = 
queryset.filter(insight_id=filters["insight"]) return queryset + + def retrieve(self, request, *args, **kwargs): + instance = self.get_object() + instance.checks = instance.alertcheck_set.all().order_by("-created_at")[:5] + serializer = self.get_serializer(instance) + return Response(serializer.data) + + +class ThresholdWithAlertSerializer(ThresholdSerializer): + alerts = AlertSerializer(many=True, read_only=True, source="alertconfiguration_set") + + class Meta(ThresholdSerializer.Meta): + fields = [*ThresholdSerializer.Meta.fields, "alerts"] + read_only_fields = [*ThresholdSerializer.Meta.read_only_fields, "alerts"] + + +class ThresholdViewSet(TeamAndOrgViewSetMixin, viewsets.ReadOnlyModelViewSet): + scope_object = "INTERNAL" + queryset = Threshold.objects.all() + serializer_class = ThresholdWithAlertSerializer diff --git a/posthog/api/app_metrics.py b/posthog/api/app_metrics.py index 12d5483873768..a362a2302fea0 100644 --- a/posthog/api/app_metrics.py +++ b/posthog/api/app_metrics.py @@ -5,7 +5,7 @@ from django.db.models import Count, Q from django.db.models.functions import TruncDay from rest_framework import mixins, request, response, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.models import BatchExportRun diff --git a/posthog/api/app_metrics2.py b/posthog/api/app_metrics2.py index 048413967cdbb..6b9ef8a451fee 100644 --- a/posthog/api/app_metrics2.py +++ b/posthog/api/app_metrics2.py @@ -4,7 +4,7 @@ from rest_framework import serializers, viewsets from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework_dataclasses.serializers import DataclassSerializer diff --git a/posthog/api/async_migration.py b/posthog/api/async_migration.py index 8c76999bbe0bf..9647d49d745c0 100644 --- a/posthog/api/async_migration.py +++ b/posthog/api/async_migration.py @@ -1,6 +1,6 @@ import structlog from rest_framework import response, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from semantic_version.base import Version from posthog.async_migrations.runner import ( diff --git a/posthog/api/cohort.py b/posthog/api/cohort.py index c7ca05a863667..543ef9825a749 100644 --- a/posthog/api/cohort.py +++ b/posthog/api/cohort.py @@ -26,7 +26,7 @@ from django.db.models.expressions import F from django.utils import timezone from rest_framework import serializers, viewsets, request, status -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/api/comments.py b/posthog/api/comments.py index 06443f92b2fcc..bb1ac5a158e97 100644 --- a/posthog/api/comments.py +++ b/posthog/api/comments.py @@ -3,7 +3,7 @@ from django.db.models import QuerySet from rest_framework import exceptions, serializers, viewsets, pagination -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/api/dashboards/dashboard.py b/posthog/api/dashboards/dashboard.py index 8462aec010f45..136824cfdf8b5 100644 --- a/posthog/api/dashboards/dashboard.py +++ 
b/posthog/api/dashboards/dashboard.py @@ -6,7 +6,7 @@ from django.shortcuts import get_object_or_404 from django.utils.timezone import now from rest_framework import exceptions, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.permissions import SAFE_METHODS, BasePermission from rest_framework.request import Request from rest_framework.response import Response @@ -18,6 +18,7 @@ ) from posthog.api.forbid_destroy_model import ForbidDestroyModel from posthog.api.insight import InsightSerializer, InsightViewSet +from posthog.api.monitoring import monitor, Feature from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import UserBasicSerializer from posthog.api.tagged_item import TaggedItemSerializerMixin, TaggedItemViewSetMixin @@ -29,7 +30,7 @@ from posthog.models.tagged_item import TaggedItem from posthog.models.user import User from posthog.user_permissions import UserPermissionsSerializerMixin -from posthog.api.monitoring import monitor, Feature +from posthog.utils import filters_override_requested_by_client logger = structlog.get_logger(__name__) @@ -124,6 +125,7 @@ def get_effective_privilege_level(self, dashboard: Dashboard) -> Dashboard.Privi class DashboardSerializer(DashboardBasicSerializer): tiles = serializers.SerializerMethodField() + filters = serializers.SerializerMethodField() created_by = UserBasicSerializer(read_only=True) use_template = serializers.CharField(write_only=True, allow_blank=True, required=False) use_dashboard = serializers.IntegerField(write_only=True, allow_null=True, required=False) @@ -172,7 +174,15 @@ def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Dashboard: validated_data.pop("delete_insights", None) # not used during creation validated_data = self._update_creation_mode(validated_data, use_template, use_dashboard) tags = validated_data.pop("tags", None) # tags are created separately below as global tag relationships - dashboard = Dashboard.objects.create(team_id=team_id, **validated_data) + + request_filters = request.data.get("filters") + if request_filters: + if not isinstance(request_filters, dict): + raise serializers.ValidationError("Filters must be a dictionary") + filters = request_filters + else: + filters = {} + dashboard = Dashboard.objects.create(team_id=team_id, filters=filters, **validated_data) if use_template: try: @@ -281,6 +291,12 @@ def update(self, instance: Dashboard, validated_data: dict, *args: Any, **kwargs if validated_data.get("deleted", False): self._delete_related_tiles(instance, self.validated_data.get("delete_insights", False)) + request_filters = initial_data.get("filters") + if request_filters: + if not isinstance(request_filters, dict): + raise serializers.ValidationError("Filters must be a dictionary") + instance.filters = request_filters + instance = super().update(instance, validated_data) user = cast(User, self.context["request"].user) @@ -380,6 +396,16 @@ def get_tiles(self, dashboard: Dashboard) -> Optional[list[ReturnDict]]: return serialized_tiles + def get_filters(self, dashboard: Dashboard) -> dict: + request = self.context.get("request") + if request: + filters_override = filters_override_requested_by_client(request) + + if filters_override is not None: + return filters_override + + return dashboard.filters + def validate(self, data): if data.get("use_dashboard", None) and data.get("use_template", None): raise serializers.ValidationError("`use_dashboard` and `use_template` cannot be used 
together") diff --git a/posthog/api/dashboards/dashboard_templates.py b/posthog/api/dashboards/dashboard_templates.py index 0f9e3d2399e3b..39941ff8b17fe 100644 --- a/posthog/api/dashboards/dashboard_templates.py +++ b/posthog/api/dashboards/dashboard_templates.py @@ -6,7 +6,7 @@ from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page from rest_framework import request, response, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework.permissions import SAFE_METHODS, BasePermission from rest_framework.request import Request diff --git a/posthog/api/data_management.py b/posthog/api/data_management.py index c0e96618ea983..0f3e900a1a50d 100644 --- a/posthog/api/data_management.py +++ b/posthog/api/data_management.py @@ -1,5 +1,5 @@ from rest_framework import viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.models.activity_logging.activity_page import activity_page_response diff --git a/posthog/api/element.py b/posthog/api/element.py index 5c3af7aa135dc..0b3809ed0f12c 100644 --- a/posthog/api/element.py +++ b/posthog/api/element.py @@ -1,7 +1,7 @@ from typing import Literal from rest_framework import request, response, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from statshog.defaults.django import statsd diff --git a/posthog/api/error_tracking.py b/posthog/api/error_tracking.py index 6ac69d1fad090..7538c93d7248d 100644 --- a/posthog/api/error_tracking.py +++ b/posthog/api/error_tracking.py @@ -4,7 +4,7 @@ from posthog.api.forbid_destroy_model import ForbidDestroyModel from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.models.error_tracking import ErrorTrackingGroup -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.response import Response from django.utils.http import urlsafe_base64_decode import json diff --git a/posthog/api/event.py b/posthog/api/event.py index 74a587f974867..da044b10c7c42 100644 --- a/posthog/api/event.py +++ b/posthog/api/event.py @@ -7,7 +7,7 @@ from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter from rest_framework import mixins, request, response, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import NotFound from rest_framework.pagination import LimitOffsetPagination from rest_framework.settings import api_settings diff --git a/posthog/api/exports.py b/posthog/api/exports.py index acb77d225c31a..352573948c182 100644 --- a/posthog/api/exports.py +++ b/posthog/api/exports.py @@ -5,7 +5,7 @@ from django.http import HttpResponse from django.utils.timezone import now from rest_framework import mixins, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework.request import Request diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index 5a82c6d30bbe4..4536de64850bd 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -11,7 +11,7 @@ status, viewsets, ) -from rest_framework.decorators import action +from 
posthog.api.utils import action from rest_framework.permissions import SAFE_METHODS, BasePermission from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/api/hog_function.py b/posthog/api/hog_function.py index e7b1798920a58..4d6693d92c9d4 100644 --- a/posthog/api/hog_function.py +++ b/posthog/api/hog_function.py @@ -6,7 +6,7 @@ from rest_framework import serializers, viewsets, exceptions from rest_framework.serializers import BaseSerializer -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/api/insight.py b/posthog/api/insight.py index 0406cd6e1abce..6bf91e1649672 100644 --- a/posthog/api/insight.py +++ b/posthog/api/insight.py @@ -15,12 +15,13 @@ from loginas.utils import is_impersonated_session from prometheus_client import Counter from rest_framework import request, serializers, status, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ParseError, PermissionDenied, ValidationError from rest_framework.parsers import JSONParser from rest_framework.response import Response from rest_framework.settings import api_settings from rest_framework_csv import renderers as csvrenderers +from rest_framework.request import Request from posthog import schema from posthog.api.documentation import extend_schema @@ -102,6 +103,7 @@ refresh_requested_by_client, relative_date_parse, str_to_bool, + filters_override_requested_by_client, ) from posthog.api.monitoring import monitor, Feature @@ -404,7 +406,7 @@ def update(self, instance: Insight, validated_data: dict, **kwargs) -> Insight: updated_insight = super().update(instance, validated_data) if not are_alerts_supported_for_insight(updated_insight): - instance.alert_set.all().delete() + instance.alertconfiguration_set.all().delete() self._log_insight_update(before_update, dashboards_before_change, updated_insight) @@ -550,6 +552,9 @@ def to_representation(self, instance: Insight): representation["dashboards"] = [tile["dashboard_id"] for tile in representation["dashboard_tiles"]] dashboard: Optional[Dashboard] = self.context.get("dashboard") + request: Optional[Request] = self.context.get("request") + dashboard_filters_override = filters_override_requested_by_client(request) if request else None + if hogql_insights_replace_filters(instance.team) and ( instance.query is not None or instance.query_from_filters is not None ): @@ -559,12 +564,20 @@ def to_representation(self, instance: Insight): query = instance.query or instance.query_from_filters if dashboard: - query = apply_dashboard_filters_to_dict(query, dashboard.filters, instance.team) + query = apply_dashboard_filters_to_dict( + query, + dashboard_filters_override if dashboard_filters_override is not None else dashboard.filters, + instance.team, + ) representation["filters"] = {} representation["query"] = query else: - representation["filters"] = instance.dashboard_filters(dashboard=dashboard) - representation["query"] = instance.get_effective_query(dashboard=dashboard) + representation["filters"] = instance.dashboard_filters( + dashboard=dashboard, dashboard_filters_override=dashboard_filters_override + ) + representation["query"] = instance.get_effective_query( + dashboard=dashboard, dashboard_filters_override=dashboard_filters_override + ) if "insight" not in representation["filters"] and not representation["query"]: 
representation["filters"]["insight"] = "TRENDS" @@ -583,6 +596,7 @@ def insight_result(self, insight: Insight) -> InsightResult: try: refresh_requested = refresh_requested_by_client(self.context["request"]) execution_mode = execution_mode_from_refresh(refresh_requested) + filters_override = filters_override_requested_by_client(self.context["request"]) if self.context.get("is_shared", False): execution_mode = shared_insights_execution_mode(execution_mode) @@ -592,6 +606,7 @@ def insight_result(self, insight: Insight) -> InsightResult: dashboard=dashboard, execution_mode=execution_mode, user=self.context["request"].user, + filters_override=filters_override, ) except ExposedHogQLError as e: raise ValidationError(str(e)) diff --git a/posthog/api/instance_settings.py b/posthog/api/instance_settings.py index 13c1461ba5655..f7c8572919a3d 100644 --- a/posthog/api/instance_settings.py +++ b/posthog/api/instance_settings.py @@ -21,10 +21,10 @@ def cast_str_to_desired_type(str_value: str, target_type: type) -> Any: - if target_type == int: + if target_type is int: return int(str_value) - if target_type == bool: + if target_type is bool: return str_to_bool(str_value) return str_value @@ -81,7 +81,7 @@ def update(self, instance: InstanceSettingHelper, validated_data: dict[str, Any] raise serializers.ValidationError({"value": "This field is required."}, code="required") target_type: type = CONSTANCE_CONFIG[instance.key][2] - if target_type == bool and isinstance(validated_data["value"], bool): + if target_type is bool and isinstance(validated_data["value"], bool): new_value_parsed = validated_data["value"] else: new_value_parsed = cast_str_to_desired_type(validated_data["value"], target_type) diff --git a/posthog/api/instance_status.py b/posthog/api/instance_status.py index 1e001b74703be..fd26a32bc1b28 100644 --- a/posthog/api/instance_status.py +++ b/posthog/api/instance_status.py @@ -5,7 +5,7 @@ from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page from rest_framework import viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.permissions import IsAuthenticated from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/api/integration.py b/posthog/api/integration.py index db9a9f92cbbc8..6c343633ff7da 100644 --- a/posthog/api/integration.py +++ b/posthog/api/integration.py @@ -3,7 +3,7 @@ from django.http import HttpResponse from django.shortcuts import redirect from rest_framework import mixins, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/api/kafka_inspector.py b/posthog/api/kafka_inspector.py index 24865e47d40d3..e966c3e374394 100644 --- a/posthog/api/kafka_inspector.py +++ b/posthog/api/kafka_inspector.py @@ -2,7 +2,7 @@ from kafka import TopicPartition from rest_framework import serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.response import Response from posthog.kafka_client.client import build_kafka_consumer diff --git a/posthog/api/log_entries.py b/posthog/api/log_entries.py index 2dd9a1a92d5b2..3aa1666eb2aeb 100644 --- a/posthog/api/log_entries.py +++ b/posthog/api/log_entries.py @@ -4,7 +4,7 @@ from rest_framework import serializers, 
viewsets from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework_dataclasses.serializers import DataclassSerializer diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py index 8ae15a0771f98..93e09ec90cd87 100644 --- a/posthog/api/notebook.py +++ b/posthog/api/notebook.py @@ -15,7 +15,7 @@ from rest_framework import serializers, viewsets from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.serializers import BaseSerializer from posthog.api.forbid_destroy_model import ForbidDestroyModel diff --git a/posthog/api/organization_domain.py b/posthog/api/organization_domain.py index 7b3dec6f9562d..e39b611c1fb32 100644 --- a/posthog/api/organization_domain.py +++ b/posthog/api/organization_domain.py @@ -2,7 +2,7 @@ from typing import Any, cast from rest_framework import exceptions, request, response, serializers -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.viewsets import ModelViewSet from posthog.api.routing import TeamAndOrgViewSetMixin diff --git a/posthog/api/organization_feature_flag.py b/posthog/api/organization_feature_flag.py index d2468cb07ce12..d91ec15ba1c41 100644 --- a/posthog/api/organization_feature_flag.py +++ b/posthog/api/organization_feature_flag.py @@ -1,6 +1,6 @@ from django.core.exceptions import ObjectDoesNotExist from rest_framework.response import Response -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework import ( mixins, viewsets, diff --git a/posthog/api/organization_invite.py b/posthog/api/organization_invite.py index b3a22fe1bf599..6acbf00f1b95c 100644 --- a/posthog/api/organization_invite.py +++ b/posthog/api/organization_invite.py @@ -9,7 +9,7 @@ status, viewsets, ) -from rest_framework.decorators import action +from posthog.api.utils import action from ee.models.explicit_team_membership import ExplicitTeamMembership from posthog.api.routing import TeamAndOrgViewSetMixin diff --git a/posthog/api/person.py b/posthog/api/person.py index f9ed2ca921c49..b623d6a39bc66 100644 --- a/posthog/api/person.py +++ b/posthog/api/person.py @@ -18,7 +18,7 @@ from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiExample, OpenApiParameter from rest_framework import request, response, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import MethodNotAllowed, NotFound, ValidationError from rest_framework.pagination import LimitOffsetPagination from rest_framework.response import Response @@ -391,7 +391,7 @@ def destroy(self, request: request.Request, pk=None, **kwargs): except Person.DoesNotExist: raise NotFound(detail="Person not found.") - @action(methods=["GET"], detail=False) + @action(methods=["GET"], detail=False, required_scopes=["person:read"]) def values(self, request: request.Request, **kwargs) -> response.Response: key = request.GET.get("key") value = request.GET.get("value") @@ -434,7 +434,7 @@ def _get_person_property_values_for_key(self, key, value): return result - @action(methods=["POST"], detail=True) + @action(methods=["POST"], detail=True, required_scopes=["person:write"]) def split(self, request: 
request.Request, pk=None, **kwargs) -> response.Response: person: Person = self.get_object() distinct_ids = person.distinct_ids @@ -479,7 +479,7 @@ def split(self, request: request.Request, pk=None, **kwargs) -> response.Respons ), ] ) - @action(methods=["POST"], detail=True) + @action(methods=["POST"], detail=True, required_scopes=["person:write"]) def update_property(self, request: request.Request, pk=None, **kwargs) -> response.Response: if request.data.get("value") is None: return Response( @@ -514,7 +514,7 @@ def update_property(self, request: request.Request, pk=None, **kwargs) -> respon ), ] ) - @action(methods=["POST"], detail=True) + @action(methods=["POST"], detail=True, required_scopes=["person:write"]) def delete_property(self, request: request.Request, pk=None, **kwargs) -> response.Response: person: Person = get_pk_or_uuid(Person.objects.filter(team_id=self.team_id), pk).get() @@ -567,7 +567,7 @@ def cohorts(self, request: request.Request) -> response.Response: return response.Response({"results": CohortSerializer(cohorts, many=True).data}) - @action(methods=["GET"], url_path="activity", detail=False) + @action(methods=["GET"], url_path="activity", detail=False, required_scopes=["activity_log:read"]) def all_activity(self, request: request.Request, **kwargs): limit = int(request.query_params.get("limit", "10")) page = int(request.query_params.get("page", "1")) diff --git a/posthog/api/plugin.py b/posthog/api/plugin.py index af8d12f3cd628..bb19663d88589 100644 --- a/posthog/api/plugin.py +++ b/posthog/api/plugin.py @@ -15,13 +15,15 @@ from django.utils.timezone import now from loginas.utils import is_impersonated_session from rest_framework import renderers, request, serializers, status, viewsets -from rest_framework.decorators import action, renderer_classes +from rest_framework.decorators import renderer_classes from rest_framework.exceptions import NotFound, PermissionDenied, ValidationError from rest_framework.permissions import SAFE_METHODS, BasePermission, IsAuthenticated from rest_framework.response import Response +from posthog.api.hog_function import HogFunctionSerializer from posthog.api.routing import TeamAndOrgViewSetMixin -from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer +from posthog.api.utils import ClassicBehaviorBooleanFieldSerializer, action +from posthog.cdp.templates import HOG_FUNCTION_MIGRATORS from posthog.models import Plugin, PluginAttachment, PluginConfig, User from posthog.models.activity_logging.activity_log import ( ActivityPage, @@ -254,6 +256,7 @@ def has_object_permission(self, request, view, object) -> bool: class PluginSerializer(serializers.ModelSerializer): url = serializers.SerializerMethodField() organization_name = serializers.SerializerMethodField() + hog_function_migration_available = serializers.SerializerMethodField() class Meta: model = Plugin @@ -273,8 +276,12 @@ class Meta: "capabilities", "metrics", "public_jobs", + "hog_function_migration_available", ] - read_only_fields = ["id", "latest_tag"] + read_only_fields = ["id", "latest_tag", "hog_function_migration_available"] + + def get_hog_function_migration_available(self, plugin: Plugin): + return HOG_FUNCTION_MIGRATORS.get(plugin.url) is not None if plugin.url else False def get_url(self, plugin: Plugin) -> Optional[str]: # remove ?private_token=... 
from url @@ -898,6 +905,29 @@ def activity(self, request: request.Request, **kwargs): return activity_page_response(activity_page, limit, page, request) + @action(methods=["POST"], url_path="migrate", detail=True) + def migrate(self, request: request.Request, **kwargs): + obj = self.get_object() + migrater = HOG_FUNCTION_MIGRATORS.get(obj.plugin.url) + + if not migrater: + raise ValidationError("No migration available for this plugin") + + hog_function_data = migrater.migrate(obj) + + if obj.enabled: + hog_function_data["enabled"] = True + + hog_function_serializer = HogFunctionSerializer(data=hog_function_data, context=self.get_serializer_context()) + hog_function_serializer.is_valid(raise_exception=True) + hog_function_serializer.save() + + if obj.enabled: + obj.enabled = False + obj.save() + + return Response(hog_function_serializer.data) + def _get_secret_fields_for_plugin(plugin: Plugin) -> set[str]: # A set of keys for config fields that have secret = true diff --git a/posthog/api/property_definition.py b/posthog/api/property_definition.py index 2bab343e5249f..84b7a03f030bd 100644 --- a/posthog/api/property_definition.py +++ b/posthog/api/property_definition.py @@ -5,7 +5,7 @@ from django.db import connection from loginas.utils import is_impersonated_session from rest_framework import mixins, request, response, serializers, status, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework.pagination import LimitOffsetPagination diff --git a/posthog/api/query.py b/posthog/api/query.py index e797b96c38ab7..8c71b1465017a 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -6,7 +6,7 @@ from pydantic import BaseModel from rest_framework import status from rest_framework import viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError, NotAuthenticated from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/api/session.py b/posthog/api/session.py index 966cef8e18088..40a14a993a862 100644 --- a/posthog/api/session.py +++ b/posthog/api/session.py @@ -1,7 +1,7 @@ import json from rest_framework import request, response, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from posthog.api.routing import TeamAndOrgViewSetMixin diff --git a/posthog/api/sharing.py b/posthog/api/sharing.py index 951a0216ed115..ef85e143152c2 100644 --- a/posthog/api/sharing.py +++ b/posthog/api/sharing.py @@ -17,7 +17,7 @@ from posthog.api.insight import InsightSerializer from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.clickhouse.client.async_task_chain import task_chain_context -from posthog.models import SessionRecording, SharingConfiguration, Team +from posthog.models import SessionRecording, SharingConfiguration, Team, InsightViewed from posthog.models.activity_logging.activity_log import Change, Detail, log_activity from posthog.models.dashboard import Dashboard from posthog.models.exported_asset import ( @@ -270,11 +270,16 @@ def retrieve(self, request: Request, *args: Any, **kwargs: Any) -> Any: context["dashboard"] = resource.dashboard asset_title = resource.insight.name or resource.insight.derived_name asset_description = resource.insight.description or "" + InsightViewed.objects.update_or_create( + insight=resource.insight, team=None, 
user=None, defaults={"last_viewed_at": now()} + ) insight_data = InsightSerializer(resource.insight, many=False, context=context).data exported_data.update({"insight": insight_data}) elif resource.dashboard and not resource.dashboard.deleted: asset_title = resource.dashboard.name asset_description = resource.dashboard.description or "" + resource.dashboard.last_accessed_at = now() + resource.dashboard.save(update_fields=["last_accessed_at"]) with task_chain_context(): dashboard_data = DashboardSerializer(resource.dashboard, context=context).data # We don't want the dashboard to be accidentally loaded via the shared endpoint diff --git a/posthog/api/survey.py b/posthog/api/survey.py index 893122aaae66a..f6f1dec80cb38 100644 --- a/posthog/api/survey.py +++ b/posthog/api/survey.py @@ -9,7 +9,7 @@ from django.views.decorators.csrf import csrf_exempt from nanoid import generate from rest_framework import request, serializers, status, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/api/team.py b/posthog/api/team.py index 948ed77608b48..9f4e04e007eaf 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -9,7 +9,7 @@ from posthog.jwt import PosthogJwtAudience, encode_jwt from rest_framework.permissions import BasePermission, IsAuthenticated from rest_framework import exceptions, request, response, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from posthog.api.geoip import get_geoip_properties from posthog.api.routing import TeamAndOrgViewSetMixin diff --git a/posthog/api/test/__snapshots__/test_action.ambr b/posthog/api/test/__snapshots__/test_action.ambr index 03d20dc4a2b43..44b749ef70294 100644 --- a/posthog/api/test/__snapshots__/test_action.ambr +++ b/posthog/api/test/__snapshots__/test_action.ambr @@ -290,76 +290,6 @@ "posthog_action"."name" ASC ''' # --- -# name: TestActionApi.test_listing_actions_is_not_nplus1.15 - ''' - SELECT COUNT(*) - FROM - (SELECT "posthog_action"."id" AS "col1" - FROM "posthog_action" - LEFT OUTER JOIN "posthog_action_events" ON ("posthog_action"."id" = "posthog_action_events"."action_id") - WHERE (NOT "posthog_action"."deleted" - AND "posthog_action"."team_id" = 2 - AND "posthog_action"."team_id" = 2) - GROUP BY 1) subquery - ''' -# --- -# name: TestActionApi.test_listing_actions_is_not_nplus1.16 - ''' - SELECT "posthog_action"."id", - "posthog_action"."name", - "posthog_action"."team_id", - "posthog_action"."description", - "posthog_action"."created_at", - "posthog_action"."created_by_id", - "posthog_action"."deleted", - "posthog_action"."post_to_slack", - "posthog_action"."slack_message_format", - "posthog_action"."updated_at", - "posthog_action"."bytecode", - "posthog_action"."bytecode_error", - "posthog_action"."steps_json", - "posthog_action"."is_calculating", - "posthog_action"."last_calculated_at", - COUNT("posthog_action_events"."event_id") AS "count", - "posthog_user"."id", - "posthog_user"."password", - "posthog_user"."last_login", - "posthog_user"."first_name", - "posthog_user"."last_name", - "posthog_user"."is_staff", - "posthog_user"."is_active", - "posthog_user"."date_joined", - "posthog_user"."uuid", - "posthog_user"."current_organization_id", - "posthog_user"."current_team_id", - "posthog_user"."email", - "posthog_user"."pending_email", - "posthog_user"."temporary_token", - "posthog_user"."distinct_id", - 
"posthog_user"."is_email_verified", - "posthog_user"."requested_password_reset_at", - "posthog_user"."has_seen_product_intro_for", - "posthog_user"."strapi_id", - "posthog_user"."theme_mode", - "posthog_user"."partial_notification_settings", - "posthog_user"."anonymize_data", - "posthog_user"."toolbar_mode", - "posthog_user"."hedgehog_config", - "posthog_user"."events_column_config", - "posthog_user"."email_opt_in" - FROM "posthog_action" - LEFT OUTER JOIN "posthog_action_events" ON ("posthog_action"."id" = "posthog_action_events"."action_id") - LEFT OUTER JOIN "posthog_user" ON ("posthog_action"."created_by_id" = "posthog_user"."id") - WHERE (NOT "posthog_action"."deleted" - AND "posthog_action"."team_id" = 2 - AND "posthog_action"."team_id" = 2) - GROUP BY "posthog_action"."id", - "posthog_user"."id" - ORDER BY "posthog_action"."last_calculated_at" DESC, - "posthog_action"."name" ASC - LIMIT 100 - ''' -# --- # name: TestActionApi.test_listing_actions_is_not_nplus1.2 ''' SELECT "posthog_organizationmembership"."id", diff --git a/posthog/api/test/__snapshots__/test_api_docs.ambr b/posthog/api/test/__snapshots__/test_api_docs.ambr index f0bd70513607a..3703dc9ea6093 100644 --- a/posthog/api/test/__snapshots__/test_api_docs.ambr +++ b/posthog/api/test/__snapshots__/test_api_docs.ambr @@ -5,6 +5,7 @@ '/home/runner/work/posthog/posthog/posthog/api/organization.py: Warning [OrganizationViewSet > OrganizationSerializer]: unable to resolve type hint for function "get_member_count". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportOrganizationViewSet]: could not derive type of path parameter "organization_id" because model "posthog.batch_exports.models.BatchExport" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportOrganizationViewSet > BatchExportSerializer]: could not resolve serializer field "HogQLSelectQueryField(required=False)". Defaulting to "string"', + '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsViewSet > PluginSerializer]: unable to resolve type hint for function "get_hog_function_migration_available". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "organization_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleViewSet > RoleSerializer]: unable to resolve type hint for function "get_members". Consider using a type hint or @extend_schema_field. Defaulting to string.', @@ -79,14 +80,12 @@ '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. 
Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes and . This will very likely result in an incorrect schema. Try renaming one.', '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/query.py: Error [QueryViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). Ignoring view for now.', '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "session_recording_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording.SessionRecording" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer]: could not resolve field on model with path "viewed". This is likely a custom field that does some unknown magic. Maybe consider annotating the field/property? Defaulting to "string". (Exception: SessionRecording has no field named \'viewed\')', '/home/runner/work/posthog/posthog/posthog/api/person.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer > MinimalPersonSerializer]: unable to resolve type hint for function "get_distinct_ids". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer]: unable to resolve type hint for function "storage". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/session.py: Error [SessionViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). 
Ignoring view for now.', '/home/runner/work/posthog/posthog/posthog/api/session.py: Warning [SessionViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.subscription.Subscription" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet > SubscriptionSerializer]: unable to resolve type hint for function "summary". Consider using a type hint or @extend_schema_field. Defaulting to string.', diff --git a/posthog/api/test/batch_exports/test_backfill.py b/posthog/api/test/batch_exports/test_backfill.py index e7a0eccadbc3a..8a760e5e7c0cd 100644 --- a/posthog/api/test/batch_exports/test_backfill.py +++ b/posthog/api/test/batch_exports/test_backfill.py @@ -1,5 +1,8 @@ +import datetime as dt + import pytest from django.test.client import Client as HttpClient +from freezegun import freeze_time from rest_framework import status from posthog.api.test.batch_exports.conftest import start_test_worker @@ -97,6 +100,48 @@ def test_batch_export_backfill_with_non_isoformatted_dates(client: HttpClient): assert response.status_code == status.HTTP_400_BAD_REQUEST, response.json() +def test_batch_export_backfill_with_end_at_in_the_future(client: HttpClient): + """Test a BatchExport backfill fails if we pass malformed dates.""" + temporal = sync_connect() + + destination_data = { + "type": "S3", + "config": { + "bucket_name": "my-production-s3-bucket", + "region": "us-east-1", + "prefix": "posthog-events/", + "aws_access_key_id": "abc123", + "aws_secret_access_key": "secret", + }, + } + batch_export_data = { + "name": "my-production-s3-bucket-destination", + "destination": destination_data, + "interval": "hour", + } + + organization = create_organization("Test Org") + team = create_team(organization) + user = create_user("test@user.com", "Test User", organization) + test_time = dt.datetime.now(dt.UTC) + client.force_login(user) + + with start_test_worker(temporal): + batch_export = create_batch_export_ok(client, team.pk, batch_export_data) + + batch_export_id = batch_export["id"] + + with freeze_time(test_time): + response = backfill_batch_export( + client, + team.pk, + batch_export_id, + test_time.isoformat(), + (test_time + dt.timedelta(hours=1, seconds=1)).isoformat(), + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST, response.json() + + def test_batch_export_backfill_with_naive_bounds(client: HttpClient): """Test a BatchExport backfill fails if we naive dates.""" temporal = sync_connect() diff --git a/posthog/api/test/dashboards/test_dashboard.py b/posthog/api/test/dashboards/test_dashboard.py index ae4840b60550c..a23edc4ccc9c8 100644 --- a/posthog/api/test/dashboards/test_dashboard.py +++ b/posthog/api/test/dashboards/test_dashboard.py @@ -553,7 +553,9 @@ def test_dashboard_insights_out_of_synch_with_tiles_are_not_shown(self): dashboard = Dashboard.objects.get(id=dashboard_id) mock_view = MagicMock() mock_view.action = "retrieve" - dashboard_data = DashboardSerializer(dashboard, context={"view": mock_view, "request": MagicMock()}).data + mock_request = MagicMock() + 
mock_request.query_params.get.return_value = None + dashboard_data = DashboardSerializer(dashboard, context={"view": mock_view, "request": mock_request}).data assert len(dashboard_data["tiles"]) == 1 def test_dashboard_insight_tiles_can_be_loaded_correct_context(self): @@ -568,7 +570,6 @@ def test_dashboard_insight_tiles_can_be_loaded_correct_context(self): response = self.dashboard_api.get_dashboard(dashboard_id) self.assertEqual(len(response["tiles"]), 1) - self.assertEqual(len(response["tiles"]), 1) tile = response["tiles"][0] assert tile["insight"]["id"] == insight_id diff --git a/posthog/api/test/test_action.py b/posthog/api/test/test_action.py index 3e4a0c4f56d87..96ae589fdea35 100644 --- a/posthog/api/test/test_action.py +++ b/posthog/api/test/test_action.py @@ -100,7 +100,7 @@ def test_create_action_generates_bytecode(self): ) assert response.status_code == status.HTTP_201_CREATED, response.json() action = Action.objects.get(pk=response.json()["id"]) - assert action.bytecode == ["_h", 32, "%/signup%", 32, "$current_url", 32, "properties", 1, 2, 17] + assert action.bytecode == ["_H", 1, 32, "%/signup%", 32, "$current_url", 32, "properties", 1, 2, 17] def test_cant_create_action_with_the_same_name(self, *args): original_action = Action.objects.create(name="user signed up", team=self.team) diff --git a/posthog/api/test/test_alert.py b/posthog/api/test/test_alert.py index 87eab7245829d..49d502256294c 100644 --- a/posthog/api/test/test_alert.py +++ b/posthog/api/test/test_alert.py @@ -28,20 +28,33 @@ def setUp(self): def test_create_and_delete_alert(self) -> None: creation_request = { "insight": self.insight["id"], - "target_value": "test@posthog.com", + "subscribed_users": [ + self.user.id, + ], "name": "alert name", - "anomaly_condition": {}, + "threshold": {"configuration": {}}, } response = self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request) expected_alert_json = { + "condition": {}, + "created_at": mock.ANY, + "created_by": mock.ANY, + "enabled": True, "id": mock.ANY, - "insight": self.insight["id"], - "target_value": "test@posthog.com", + "insight": mock.ANY, + "last_notified_at": None, "name": "alert name", - "anomaly_condition": {}, + "subscribed_users": mock.ANY, + "state": "inactive", + "threshold": { + "configuration": {}, + "created_at": mock.ANY, + "id": mock.ANY, + "name": "", + }, } - assert response.status_code == status.HTTP_201_CREATED + assert response.status_code == status.HTTP_201_CREATED, response.content assert response.json() == expected_alert_json alerts = self.client.get(f"/api/projects/{self.team.id}/alerts") @@ -55,9 +68,11 @@ def test_create_and_delete_alert(self) -> None: def test_incorrect_creation(self) -> None: creation_request = { - "target_value": "test@posthog.com", + "subscribed_users": [ + self.user.id, + ], + "threshold": {"configuration": {}}, "name": "alert name", - "anomaly_condition": {}, } response = self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request) assert response.status_code == status.HTTP_400_BAD_REQUEST @@ -71,9 +86,11 @@ def test_incorrect_creation(self) -> None: ).json() creation_request = { "insight": str(another_team_insight["id"]), - "target_value": "test@posthog.com", + "subscribed_users": [ + self.user.id, + ], + "threshold": {"configuration": {}}, "name": "alert name", - "anomaly_condition": {}, } response = self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request) assert response.status_code == status.HTTP_400_BAD_REQUEST @@ -81,9 +98,11 @@ def 
test_incorrect_creation(self) -> None: def test_create_and_list_alert(self) -> None: creation_request = { "insight": self.insight["id"], - "target_value": "test@posthog.com", + "subscribed_users": [ + self.user.id, + ], + "threshold": {"configuration": {}}, "name": "alert name", - "anomaly_condition": {}, } alert = self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request).json() @@ -99,15 +118,35 @@ def test_create_and_list_alert(self) -> None: assert list_for_another_insight.status_code == status.HTTP_200_OK assert len(list_for_another_insight.json()["results"]) == 0 + def test_alert_limit(self) -> None: + with mock.patch("posthog.api.alert.AlertConfiguration.ALERTS_PER_TEAM") as alert_limit: + alert_limit.__get__ = mock.Mock(return_value=1) + + creation_request = { + "insight": self.insight["id"], + "subscribed_users": [ + self.user.id, + ], + "threshold": {"configuration": {}}, + "name": "alert name", + } + self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request) + + alert_2 = self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request).json() + + assert alert_2["code"] == "invalid_input" + def test_alert_is_deleted_on_insight_update(self) -> None: another_insight = self.client.post( f"/api/projects/{self.team.id}/insights", data=self.default_insight_data ).json() creation_request = { "insight": another_insight["id"], - "target_value": "test@posthog.com", + "subscribed_users": [ + self.user.id, + ], + "threshold": {"configuration": {}}, "name": "alert name", - "anomaly_condition": {}, } alert = self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request).json() diff --git a/posthog/api/test/test_hog_function.py b/posthog/api/test/test_hog_function.py index 191b3473da27d..f62a96714a311 100644 --- a/posthog/api/test/test_hog_function.py +++ b/posthog/api/test/test_hog_function.py @@ -5,6 +5,7 @@ from inline_snapshot import snapshot from rest_framework import status +from hogvm.python.operation import HOGQL_BYTECODE_VERSION from posthog.constants import AvailableFeature from posthog.models.action.action import Action from posthog.models.hog_functions.hog_function import DEFAULT_STATE, HogFunction @@ -140,10 +141,10 @@ def test_create_hog_function(self, *args): "updated_at": ANY, "enabled": False, "hog": "fetch(inputs.url);", - "bytecode": ["_h", 32, "url", 32, "inputs", 1, 2, 2, "fetch", 1, 35], + "bytecode": ["_H", HOGQL_BYTECODE_VERSION, 32, "url", 32, "inputs", 1, 2, 2, "fetch", 1, 35], "inputs_schema": [], "inputs": {}, - "filters": {"bytecode": ["_h", 29]}, + "filters": {"bytecode": ["_H", HOGQL_BYTECODE_VERSION, 29]}, "icon_url": None, "template": None, "masking": None, @@ -325,7 +326,7 @@ def test_generates_hog_bytecode(self, *args): ) # JSON loads for one line comparison assert response.json()["bytecode"] == json.loads( - '["_h", 33, 0, 33, 3, 36, 0, 15, 40, 45, 33, 1, 36, 0, 6, 37, 0, 32, "headers", 32, "x-count", 36, 0, 42, 1, 32, "body", 32, "payload", 32, "inputs", 1, 2, 32, "method", 32, "method", 32, "inputs", 1, 2, 42, 3, 32, "url", 32, "inputs", 1, 2, 2, "fetch", 2, 35, 39, -52, 35]' + f'["_H", {HOGQL_BYTECODE_VERSION}, 33, 0, 33, 3, 36, 0, 15, 40, 45, 33, 1, 36, 0, 6, 37, 0, 32, "url", 32, "inputs", 1, 2, 32, "headers", 32, "x-count", 36, 0, 42, 1, 32, "body", 32, "payload", 32, "inputs", 1, 2, 32, "method", 32, "method", 32, "inputs", 1, 2, 42, 3, 2, "fetch", 2, 35, 39, -52, 35]' ), response.json() def test_generates_inputs_bytecode(self, *args): @@ -334,7 +335,12 @@ def test_generates_inputs_bytecode(self, 
*args): assert response.json()["inputs"] == { "url": { "value": "http://localhost:2080/0e02d917-563f-4050-9725-aad881b69937", - "bytecode": ["_h", 32, "http://localhost:2080/0e02d917-563f-4050-9725-aad881b69937"], + "bytecode": [ + "_H", + HOGQL_BYTECODE_VERSION, + 32, + "http://localhost:2080/0e02d917-563f-4050-9725-aad881b69937", + ], }, "payload": { "value": { @@ -345,18 +351,48 @@ def test_generates_inputs_bytecode(self, *args): "event_url": "{f'{event.url}-test'}", }, "bytecode": { - "event": ["_h", 32, "event", 1, 1], - "groups": ["_h", 32, "groups", 1, 1], - "nested": {"foo": ["_h", 32, "url", 32, "event", 1, 2]}, - "person": ["_h", 32, "person", 1, 1], - "event_url": ["_h", 32, "-test", 32, "url", 32, "event", 1, 2, 2, "concat", 2], + "event": ["_H", HOGQL_BYTECODE_VERSION, 32, "event", 1, 1], + "groups": ["_H", HOGQL_BYTECODE_VERSION, 32, "groups", 1, 1], + "nested": {"foo": ["_H", HOGQL_BYTECODE_VERSION, 32, "url", 32, "event", 1, 2]}, + "person": ["_H", HOGQL_BYTECODE_VERSION, 32, "person", 1, 1], + "event_url": [ + "_H", + HOGQL_BYTECODE_VERSION, + 32, + "url", + 32, + "event", + 1, + 2, + 32, + "-test", + 2, + "concat", + 2, + ], }, }, "method": {"value": "POST"}, "headers": { "value": {"version": "v={event.properties.$lib_version}"}, "bytecode": { - "version": ["_h", 32, "$lib_version", 32, "properties", 32, "event", 1, 3, 32, "v=", 2, "concat", 2] + "version": [ + "_H", + HOGQL_BYTECODE_VERSION, + 32, + "v=", + 32, + "$lib_version", + 32, + "properties", + 32, + "event", + 1, + 3, + 2, + "concat", + 2, + ] }, }, } @@ -394,16 +430,19 @@ def test_generates_filters_bytecode(self, *args): "actions": [{"id": f"{action.id}", "name": "Test Action", "type": "actions", "order": 1}], "filter_test_accounts": True, "bytecode": [ - "_h", + "_H", + HOGQL_BYTECODE_VERSION, 32, - "%docs%", + "%@posthog.com%", 32, - "$current_url", + "email", 32, "properties", + 32, + "person", 1, - 2, - 17, + 3, + 20, 32, "$pageview", 32, @@ -424,8 +463,6 @@ def test_generates_filters_bytecode(self, *args): 1, 3, 20, - 3, - 2, 32, "$pageview", 32, @@ -434,16 +471,16 @@ def test_generates_filters_bytecode(self, *args): 1, 11, 32, - "%@posthog.com%", + "%docs%", 32, - "email", + "$current_url", 32, "properties", - 32, - "person", 1, + 2, + 17, 3, - 20, + 2, 3, 2, 4, @@ -465,7 +502,7 @@ def test_saves_masking_config(self, *args): "ttl": 60, "threshold": 20, "hash": "{person.properties.email}", - "bytecode": ["_h", 32, "email", 32, "properties", 32, "person", 1, 3], + "bytecode": ["_H", HOGQL_BYTECODE_VERSION, 32, "email", 32, "properties", 32, "person", 1, 3], } ) diff --git a/posthog/api/test/test_insight.py b/posthog/api/test/test_insight.py index 9962db4023311..0ce66a4bb6182 100644 --- a/posthog/api/test/test_insight.py +++ b/posthog/api/test/test_insight.py @@ -311,6 +311,7 @@ def test_get_insight_in_shared_context(self) -> None: dashboard=mock.ANY, execution_mode=ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE, user=mock.ANY, + filters_override=None, ) with patch( @@ -322,6 +323,7 @@ def test_get_insight_in_shared_context(self) -> None: dashboard=mock.ANY, execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, user=mock.ANY, + filters_override=None, ) def test_get_insight_by_short_id(self) -> None: diff --git a/posthog/api/test/test_plugin.py b/posthog/api/test/test_plugin.py index 0af1f4ef52de4..f92350045a38c 100644 --- a/posthog/api/test/test_plugin.py +++ b/posthog/api/test/test_plugin.py @@ -496,6 +496,7 @@ def test_create_plugin_repo_url(self, mock_get, mock_reload): 
"capabilities": {}, "metrics": {}, "public_jobs": {}, + "hog_function_migration_available": False, }, ) self.assertEqual(Plugin.objects.count(), 1) @@ -540,6 +541,7 @@ def test_create_plugin_commit_url(self, mock_get, mock_reload): "capabilities": {}, "metrics": {}, "public_jobs": {}, + "hog_function_migration_available": False, }, ) self.assertEqual(Plugin.objects.count(), 1) @@ -586,6 +588,7 @@ def test_create_plugin_other_commit_url(self, mock_get, mock_reload): "capabilities": {}, "metrics": {}, "public_jobs": {}, + "hog_function_migration_available": False, }, ) self.assertEqual(Plugin.objects.count(), 1) @@ -753,6 +756,7 @@ def test_transpile_plugin_frontend_source(self, mock_get, mock_reload): "capabilities": {}, "metrics": {}, "public_jobs": {}, + "hog_function_migration_available": False, } assert Plugin.objects.count() == 1 diff --git a/posthog/api/user.py b/posthog/api/user.py index 4a6c11ccf19f4..e60fa08f896e5 100644 --- a/posthog/api/user.py +++ b/posthog/api/user.py @@ -24,7 +24,7 @@ from django_otp.util import random_hex from loginas.utils import is_impersonated_session from rest_framework import exceptions, mixins, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import NotFound from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework.response import Response diff --git a/posthog/api/utils.py b/posthog/api/utils.py index 6c38ba3ea4700..69abed44fd27f 100644 --- a/posthog/api/utils.py +++ b/posthog/api/utils.py @@ -1,4 +1,7 @@ import json +from rest_framework.decorators import action as drf_action +from functools import wraps +from posthog.api.documentation import extend_schema import re import socket import urllib.parse @@ -422,3 +425,46 @@ def hostname_in_allowed_url_list(allowed_url_list: Optional[list[str]], hostname def parse_domain(url: Any) -> Optional[str]: return urlparse(url).hostname + + +# By default, DRF spectacular uses the serializer of the view as the response format for actions. However, most actions don't return a version of the model, but something custom. This function removes the response from all actions in the documentation. +def action(methods=None, detail=None, url_path=None, url_name=None, responses=None, **kwargs): + """ + Mark a ViewSet method as a routable action. + + `@action`-decorated functions will be endowed with a `mapping` property, + a `MethodMapper` that can be used to add additional method-based behaviors + on the routed action. + + :param methods: A list of HTTP method names this action responds to. + Defaults to GET only. + :param detail: Required. Determines whether this action applies to + instance/detail requests or collection/list requests. + :param url_path: Define the URL segment for this action. Defaults to the + name of the method decorated. + :param url_name: Define the internal (`reverse`) URL name for this action. + Defaults to the name of the method decorated with underscores + replaced with dashes. + :param responses: Serializer or pydantic model of the response for documentation + :param kwargs: Additional properties to set on the view. This can be used + to override viewset-level *_classes settings, equivalent to + how the `@renderer_classes` etc. decorators work for function- + based API views. 
+ """ + + def decorator(func): + @extend_schema(responses=responses) + @drf_action( + methods=methods, + detail=detail, + url_path=url_path, + url_name=url_name, + **kwargs, + ) + @wraps(func) + def wrapped_function(*args, **kwargs): + return func(*args, **kwargs) + + return wrapped_function + + return decorator diff --git a/posthog/batch_exports/http.py b/posthog/batch_exports/http.py index 596bb2ba4252a..bd56a092771e3 100644 --- a/posthog/batch_exports/http.py +++ b/posthog/batch_exports/http.py @@ -6,7 +6,7 @@ from django.db import transaction from django.utils.timezone import now from rest_framework import filters, request, response, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ( NotAuthenticated, NotFound, @@ -390,6 +390,12 @@ def backfill(self, request: request.Request, *args, **kwargs) -> response.Respon raise ValidationError("The initial backfill datetime 'start_at' happens after 'end_at'") batch_export = self.get_object() + + if end_at > dt.datetime.now(dt.UTC) + batch_export.interval_time_delta: + raise ValidationError( + f"The provided 'end_at' ({end_at.isoformat()}) is too far into the future. Cannot backfill beyond 1 batch period into the future." + ) + temporal = sync_connect() try: backfill_id = backfill_export(temporal, str(batch_export.pk), self.team_id, start_at, end_at) diff --git a/posthog/batch_exports/models.py b/posthog/batch_exports/models.py index e27e34ecd3ab7..e319d73db30ab 100644 --- a/posthog/batch_exports/models.py +++ b/posthog/batch_exports/models.py @@ -39,21 +39,21 @@ class Destination(models.TextChoices): "NoOp": set(), } - type: models.CharField = models.CharField( + type = models.CharField( choices=Destination.choices, max_length=64, help_text="A choice of supported BatchExportDestination types.", ) - config: models.JSONField = models.JSONField( + config = models.JSONField( default=dict, blank=True, help_text="A JSON field to store all configuration parameters required to access a BatchExportDestination.", ) - created_at: models.DateTimeField = models.DateTimeField( + created_at = models.DateTimeField( auto_now_add=True, help_text="The timestamp at which this BatchExportDestination was created.", ) - last_updated_at: models.DateTimeField = models.DateTimeField( + last_updated_at = models.DateTimeField( auto_now=True, help_text="The timestamp at which this BatchExportDestination was last updated.", ) @@ -85,31 +85,25 @@ class Status(models.TextChoices): on_delete=models.CASCADE, help_text="The BatchExport this run belongs to.", ) - status: models.CharField = models.CharField( - choices=Status.choices, max_length=64, help_text="The status of this run." - ) - records_completed: models.IntegerField = models.IntegerField( - null=True, help_text="The number of records that have been exported." - ) - latest_error: models.TextField = models.TextField( - null=True, help_text="The latest error that occurred during this run." 
- ) - data_interval_start: models.DateTimeField = models.DateTimeField(help_text="The start of the data interval.") - data_interval_end: models.DateTimeField = models.DateTimeField(help_text="The end of the data interval.") - cursor: models.TextField = models.TextField(null=True, help_text="An opaque cursor that may be used to resume.") - created_at: models.DateTimeField = models.DateTimeField( + status = models.CharField(choices=Status.choices, max_length=64, help_text="The status of this run.") + records_completed = models.IntegerField(null=True, help_text="The number of records that have been exported.") + latest_error = models.TextField(null=True, help_text="The latest error that occurred during this run.") + data_interval_start = models.DateTimeField(help_text="The start of the data interval.") + data_interval_end = models.DateTimeField(help_text="The end of the data interval.") + cursor = models.TextField(null=True, help_text="An opaque cursor that may be used to resume.") + created_at = models.DateTimeField( auto_now_add=True, help_text="The timestamp at which this BatchExportRun was created.", ) - finished_at: models.DateTimeField = models.DateTimeField( + finished_at = models.DateTimeField( null=True, help_text="The timestamp at which this BatchExportRun finished, successfully or not.", ) - last_updated_at: models.DateTimeField = models.DateTimeField( + last_updated_at = models.DateTimeField( auto_now=True, help_text="The timestamp at which this BatchExportRun was last updated.", ) - records_total_count: models.IntegerField = models.IntegerField( + records_total_count = models.IntegerField( null=True, help_text="The total count of records that should be exported in this BatchExportRun." ) @@ -175,9 +169,9 @@ class Model(models.TextChoices): EVENTS = "events" PERSONS = "persons" - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, help_text="The team this belongs to.") - name: models.TextField = models.TextField(help_text="A human-readable name for this BatchExport.") - destination: models.ForeignKey = models.ForeignKey( + team = models.ForeignKey("Team", on_delete=models.CASCADE, help_text="The team this belongs to.") + name = models.TextField(help_text="A human-readable name for this BatchExport.") + destination = models.ForeignKey( "BatchExportDestination", on_delete=models.CASCADE, help_text="The destination to export data to.", @@ -191,31 +185,31 @@ class Model(models.TextChoices): ) paused = models.BooleanField(default=False, help_text="Whether this BatchExport is paused or not.") deleted = models.BooleanField(default=False, help_text="Whether this BatchExport is deleted or not.") - created_at: models.DateTimeField = models.DateTimeField( + created_at = models.DateTimeField( auto_now_add=True, help_text="The timestamp at which this BatchExport was created.", ) - last_updated_at: models.DateTimeField = models.DateTimeField( + last_updated_at = models.DateTimeField( auto_now=True, help_text="The timestamp at which this BatchExport was last updated.", ) - last_paused_at: models.DateTimeField = models.DateTimeField( + last_paused_at = models.DateTimeField( null=True, default=None, help_text="The timestamp at which this BatchExport was last paused.", ) - start_at: models.DateTimeField = models.DateTimeField( + start_at = models.DateTimeField( null=True, default=None, help_text="Time before which any Batch Export runs won't be triggered.", ) - end_at: models.DateTimeField = models.DateTimeField( + end_at = models.DateTimeField( null=True, default=None, 
help_text="Time after which any Batch Export runs won't be triggered.", ) - schema: models.JSONField = models.JSONField( + schema = models.JSONField( null=True, default=None, help_text="A schema of custom fields to select when exporting data.", @@ -265,26 +259,24 @@ class Status(models.TextChoices): RUNNING = "Running" STARTING = "Starting" - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, help_text="The team this belongs to.") + team = models.ForeignKey("Team", on_delete=models.CASCADE, help_text="The team this belongs to.") batch_export = models.ForeignKey( "BatchExport", on_delete=models.CASCADE, help_text="The BatchExport this backfill belongs to.", ) - start_at: models.DateTimeField = models.DateTimeField(help_text="The start of the data interval.") - end_at: models.DateTimeField = models.DateTimeField(help_text="The end of the data interval.", null=True) - status: models.CharField = models.CharField( - choices=Status.choices, max_length=64, help_text="The status of this backfill." - ) - created_at: models.DateTimeField = models.DateTimeField( + start_at = models.DateTimeField(help_text="The start of the data interval.") + end_at = models.DateTimeField(help_text="The end of the data interval.", null=True) + status = models.CharField(choices=Status.choices, max_length=64, help_text="The status of this backfill.") + created_at = models.DateTimeField( auto_now_add=True, help_text="The timestamp at which this BatchExportBackfill was created.", ) - finished_at: models.DateTimeField = models.DateTimeField( + finished_at = models.DateTimeField( null=True, help_text="The timestamp at which this BatchExportBackfill finished, successfully or not.", ) - last_updated_at: models.DateTimeField = models.DateTimeField( + last_updated_at = models.DateTimeField( auto_now=True, help_text="The timestamp at which this BatchExportBackfill was last updated.", ) diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index 16bea15ec3e51..49cccbac142e2 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -537,7 +537,7 @@ def update_batch_export_run( run_id: The id of the BatchExportRun to update. """ model = BatchExportRun.objects.filter(id=run_id) - update_at = dt.datetime.now() + update_at = dt.datetime.now(dt.UTC) updated = model.update( **kwargs, @@ -560,7 +560,7 @@ async def aupdate_batch_export_run( run_id: The id of the BatchExportRun to update. 
""" model = BatchExportRun.objects.filter(id=run_id) - update_at = dt.datetime.now() + update_at = dt.datetime.now(dt.UTC) updated = await model.aupdate( **kwargs, diff --git a/posthog/caching/calculate_results.py b/posthog/caching/calculate_results.py index f1b4d50fc4bc8..7da32bb9e88cd 100644 --- a/posthog/caching/calculate_results.py +++ b/posthog/caching/calculate_results.py @@ -123,7 +123,12 @@ def get_cache_type(cacheable: Optional[FilterType] | Optional[dict]) -> CacheTyp def calculate_for_query_based_insight( - insight: Insight, *, dashboard: Optional[Dashboard] = None, execution_mode: ExecutionMode, user: Optional[User] + insight: Insight, + *, + dashboard: Optional[Dashboard] = None, + execution_mode: ExecutionMode, + user: Optional[User], + filters_override: Optional[dict] = None, ) -> "InsightResult": from posthog.caching.fetch_from_cache import InsightResult, NothingInCacheResult from posthog.caching.insight_cache import update_cached_state @@ -135,7 +140,9 @@ def calculate_for_query_based_insight( response = process_response = process_query_dict( insight.team, insight.query, - dashboard_filters_json=dashboard.filters if dashboard is not None else None, + dashboard_filters_json=( + filters_override if filters_override is not None else dashboard.filters if dashboard is not None else None + ), execution_mode=execution_mode, user=user, insight_id=insight.pk, diff --git a/posthog/caching/warming.py b/posthog/caching/warming.py index 7e5608ba7d714..681cb773ff871 100644 --- a/posthog/caching/warming.py +++ b/posthog/caching/warming.py @@ -1,3 +1,4 @@ +import itertools from datetime import timedelta, UTC, datetime from collections.abc import Generator from typing import Optional @@ -6,7 +7,7 @@ from celery import shared_task from celery.canvas import chain from django.db.models import Q -from prometheus_client import Counter +from prometheus_client import Counter, Gauge from sentry_sdk import capture_exception from posthog.api.services.query import process_query_dict @@ -21,8 +22,8 @@ logger = structlog.get_logger(__name__) -STALE_INSIGHTS_COUNTER = Counter( - "posthog_cache_warming_stale_insights", +STALE_INSIGHTS_GAUGE = Gauge( + "posthog_cache_warming_stale_insights_gauge", "Number of stale insights present", ["team_id"], ) @@ -35,7 +36,7 @@ LAST_VIEWED_THRESHOLD = timedelta(days=7) -def priority_insights(team: Team) -> Generator[tuple[int, Optional[int]], None, None]: +def priority_insights(team: Team, shared_only: bool = False) -> Generator[tuple[int, Optional[int]], None, None]: """ This is the place to decide which insights should be kept warm. The reasoning is that this will be a yes or no decision. 
If we need to keep it warm, we try our best @@ -47,7 +48,7 @@ def priority_insights(team: Team) -> Generator[tuple[int, Optional[int]], None, QueryCacheManager.clean_up_stale_insights(team_id=team.pk, threshold=threshold) combos = QueryCacheManager.get_stale_insights(team_id=team.pk, limit=500) - STALE_INSIGHTS_COUNTER.labels(team_id=team.pk).inc(len(combos)) + STALE_INSIGHTS_GAUGE.labels(team_id=team.pk).set(len(combos)) dashboard_q_filter = Q() insight_ids_single = set() @@ -59,17 +60,22 @@ def priority_insights(team: Team) -> Generator[tuple[int, Optional[int]], None, insight_ids_single.add(insight_id) if insight_ids_single: - single_insights = ( - team.insight_set.filter(insightviewed__last_viewed_at__gte=threshold, pk__in=insight_ids_single) - .distinct() - .values_list("id", flat=True) + single_insights = team.insight_set.filter( + insightviewed__last_viewed_at__gte=threshold, + pk__in=insight_ids_single, ) - for single_insight_id in single_insights: + if shared_only: + single_insights = single_insights.filter(sharingconfiguration__enabled=True) + + for single_insight_id in single_insights.distinct().values_list("id", flat=True): yield single_insight_id, None if not dashboard_q_filter: return + if shared_only: + dashboard_q_filter &= Q(dashboard__sharingconfiguration__enabled=True) + dashboard_tiles = ( DashboardTile.objects.filter(dashboard__last_accessed_at__gte=threshold) .filter(dashboard_q_filter) @@ -82,16 +88,32 @@ def priority_insights(team: Team) -> Generator[tuple[int, Optional[int]], None, @shared_task(ignore_result=True, expires=60 * 15) def schedule_warming_for_teams_task(): team_ids = largest_teams(limit=10) + threshold = datetime.now(UTC) - LAST_VIEWED_THRESHOLD - teams = Team.objects.filter(Q(pk__in=team_ids) | Q(extra_settings__insights_cache_warming=True)) + prio_teams = Team.objects.filter(Q(pk__in=team_ids) | Q(extra_settings__insights_cache_warming=True)) + teams_with_recently_viewed_shared = Team.objects.filter( + Q( + Q(sharingconfiguration__dashboard__last_accessed_at__gte=threshold) + | Q(sharingconfiguration__insight__insightviewed__last_viewed_at__gte=threshold) + ), + sharingconfiguration__enabled=True, + ).difference(prio_teams) + + all_teams = itertools.chain( + zip(prio_teams, [False] * len(prio_teams)), + zip(teams_with_recently_viewed_shared, [True] * len(teams_with_recently_viewed_shared)), + ) - logger.info("Warming insight cache: teams", team_ids=[team.pk for team in teams]) + # Use a fixed expiration time since tasks in the chain are executed sequentially + expire_after = datetime.now(UTC) + timedelta(minutes=50) - for team in teams: - insight_tuples = priority_insights(team) + for team, shared_only in all_teams: + insight_tuples = priority_insights(team, shared_only=shared_only) # We chain the task execution to prevent queries *for a single team* running at the same time - chain(*(warm_insight_cache_task.si(*insight_tuple) for insight_tuple in insight_tuples))() + chain( + *(warm_insight_cache_task.si(*insight_tuple).set(expires=expire_after) for insight_tuple in insight_tuples) + )() @shared_task( @@ -103,7 +125,7 @@ def schedule_warming_for_teams_task(): retry_backoff_max=3, max_retries=3, ) -def warm_insight_cache_task(insight_id: int, dashboard_id: int): +def warm_insight_cache_task(insight_id: int, dashboard_id: Optional[int]): insight = Insight.objects.get(pk=insight_id) dashboard = None @@ -124,6 +146,8 @@ def warm_insight_cache_task(insight_id: int, dashboard_id: int): # - in case someone refreshed after this task was triggered # - if 
insight + dashboard combinations have the same cache key, we prevent needless recalculations execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, + insight_id=insight_id, + dashboard_id=dashboard_id, ) PRIORITY_INSIGHTS_COUNTER.labels( diff --git a/posthog/cdp/filters.py b/posthog/cdp/filters.py index a6668f12a499c..c6bebe16e651f 100644 --- a/posthog/cdp/filters.py +++ b/posthog/cdp/filters.py @@ -28,8 +28,14 @@ def hog_function_filters_to_expr(filters: dict, team: Team, actions: dict[int, A exprs.extend(common_filters_expr) # Events - if filter.get("type") == "events" and filter.get("name"): - exprs.append(parse_expr("event = {event}", {"event": ast.Constant(value=filter["name"])})) + if filter.get("type") == "events" and filter.get("id"): + event_name = filter["id"] + + if event_name is None: + # all events + exprs.append(ast.Constant(value=1)) + else: + exprs.append(parse_expr("event = {event}", {"event": ast.Constant(value=event_name)})) # Actions if filter.get("type") == "actions": diff --git a/posthog/cdp/templates/__init__.py b/posthog/cdp/templates/__init__.py index 093ddd84b36a6..ca1b3fcea137c 100644 --- a/posthog/cdp/templates/__init__.py +++ b/posthog/cdp/templates/__init__.py @@ -1,7 +1,7 @@ from .webhook.template_webhook import template as webhook from .slack.template_slack import template as slack from .hubspot.template_hubspot import template as hubspot -from .customerio.template_customerio import template as customerio +from .customerio.template_customerio import template as customerio, TemplateCustomerioMigrator from .intercom.template_intercom import template as intercom from .sendgrid.template_sendgrid import template as sendgrid from .clearbit.template_clearbit import template as clearbit @@ -37,4 +37,8 @@ HOG_FUNCTION_TEMPLATES_BY_ID = {template.id: template for template in HOG_FUNCTION_TEMPLATES} +HOG_FUNCTION_MIGRATORS = { + TemplateCustomerioMigrator.plugin_url: TemplateCustomerioMigrator, +} + __all__ = ["HOG_FUNCTION_TEMPLATES", "HOG_FUNCTION_TEMPLATES_BY_ID"] diff --git a/posthog/cdp/templates/customerio/template_customerio.py b/posthog/cdp/templates/customerio/template_customerio.py index b410ed2c31365..a9131f1bd47e0 100644 --- a/posthog/cdp/templates/customerio/template_customerio.py +++ b/posthog/cdp/templates/customerio/template_customerio.py @@ -1,4 +1,6 @@ -from posthog.cdp.templates.hog_function_template import HogFunctionTemplate +from copy import deepcopy +import dataclasses +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate, HogFunctionTemplateMigrator # Based off of https://customer.io/docs/api/track/#operation/entity @@ -182,3 +184,62 @@ "filter_test_accounts": True, }, ) + + +class TemplateCustomerioMigrator(HogFunctionTemplateMigrator): + plugin_url = "https://github.com/PostHog/customerio-plugin" + + @classmethod + def migrate(cls, obj): + hf = deepcopy(dataclasses.asdict(template)) + + host = obj.config.get("host", "track.customer.io") + events_to_send = obj.config.get("eventsToSend") + token = obj.config.get("customerioToken", "") + customerio_site_id = obj.config.get("customerioSiteId", "") + anon_option = obj.config.get("sendEventsFromAnonymousUsers", "Send all events") + identify_by_email = obj.config.get("identifyByEmail", "No") == "Yes" + + hf["filters"] = {} + + if anon_option == "Send all events": + pass + elif anon_option == "Only send events from users with emails": + # TODO: Add support for general filters + hf["filters"]["properties"] = [ + { + "key": "email", + "value": "is_set", + 
"operator": "is_set", + "type": "person", + } + ] + elif anon_option == "Only send events from users that have been identified": + hf["filters"]["properties"] = [ + { + "key": "$is_identified", + "value": ["true"], + "operator": "exact", + "type": "event", + } + ] + + if events_to_send: + hf["filters"]["events"] = [ + {"id": event.strip(), "name": event.strip() or "All events", "type": "events", "order": 0} + for event in events_to_send.split(",") + ] + + hf["inputs"] = { + "action": {"value": "automatic"}, + "site_id": {"value": customerio_site_id}, + "token": {"value": token}, + "host": {"value": host}, + "identifiers": {"value": {"email": "{person.properties.email}"}} + if identify_by_email + else {"value": {"id": "{event.distinct_id}"}}, + "include_all_properties": {"value": True}, + "attributes": {"value": {}}, + } + + return hf diff --git a/posthog/cdp/templates/customerio/test_template_customerio.py b/posthog/cdp/templates/customerio/test_template_customerio.py index 823bf777e630c..606590cd97d46 100644 --- a/posthog/cdp/templates/customerio/test_template_customerio.py +++ b/posthog/cdp/templates/customerio/test_template_customerio.py @@ -1,6 +1,11 @@ from inline_snapshot import snapshot from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest -from posthog.cdp.templates.customerio.template_customerio import template as template_customerio +from posthog.cdp.templates.customerio.template_customerio import ( + TemplateCustomerioMigrator, + template as template_customerio, +) +from posthog.models.plugin import PluginConfig +from posthog.test.base import BaseTest def create_inputs(**kwargs): @@ -111,3 +116,97 @@ def test_function_requires_identifier(self): assert self.get_mock_print_calls() == snapshot( [("No identifier set. Skipping as at least 1 identifier is needed.",)] ) + + +class TestTemplateMigration(BaseTest): + def get_plugin_config(self, config: dict): + _config = { + "host": "track.customer.io", + "eventsToSend": "", + "customerioToken": "TOKEN", + "customerioSiteId": "SITE_ID", + "sendEventsFromAnonymousUsers": "Send all events", + "identifyByEmail": "No", + } + _config.update(config) + return PluginConfig(enabled=True, order=0, config=_config) + + def test_full_function(self): + obj = self.get_plugin_config({}) + + template = TemplateCustomerioMigrator.migrate(obj) + assert template["inputs"] == snapshot( + { + "action": {"value": "automatic"}, + "site_id": {"value": "SITE_ID"}, + "token": {"value": "TOKEN"}, + "host": {"value": "track.customer.io"}, + "identifiers": {"value": {"id": "{event.distinct_id}"}}, + "include_all_properties": {"value": True}, + "attributes": {"value": {}}, + } + ) + assert template["filters"] == snapshot({}) + assert template["inputs"] == snapshot( + { + "action": {"value": "automatic"}, + "site_id": {"value": "SITE_ID"}, + "token": {"value": "TOKEN"}, + "host": {"value": "track.customer.io"}, + "identifiers": {"value": {"id": "{event.distinct_id}"}}, + "include_all_properties": {"value": True}, + "attributes": {"value": {}}, + } + ) + + def test_anon_config_send_all(self): + obj = self.get_plugin_config( + { + "sendEventsFromAnonymousUsers": "Send all events", + } + ) + + template = TemplateCustomerioMigrator.migrate(obj) + assert template["filters"] == snapshot({}) + + def test_anon_config_send_emails(self): + obj = self.get_plugin_config( + { + "sendEventsFromAnonymousUsers": "Only send events from users with emails", + } + ) + + template = TemplateCustomerioMigrator.migrate(obj) + assert template["filters"] == snapshot( + 
{"properties": [{"key": "email", "value": "is_set", "operator": "is_set", "type": "person"}]} + ) + + def test_anon_config_send_identified(self): + obj = self.get_plugin_config( + { + "sendEventsFromAnonymousUsers": "Only send events from users that have been identified", + } + ) + + template = TemplateCustomerioMigrator.migrate(obj) + assert template["filters"] == snapshot( + {"properties": [{"key": "$is_identified", "value": ["true"], "operator": "exact", "type": "event"}]} + ) + + def test_identify_by_email(self): + obj = self.get_plugin_config({"identifyByEmail": "Yes"}) + template = TemplateCustomerioMigrator.migrate(obj) + assert template["inputs"]["identifiers"] == snapshot({"value": {"email": "{person.properties.email}"}}) + + def test_events_filters(self): + obj = self.get_plugin_config({"eventsToSend": "event1,event2, $pageview"}) + template = TemplateCustomerioMigrator.migrate(obj) + assert template["filters"] == snapshot( + { + "events": [ + {"id": "event1", "name": "event1", "type": "events", "order": 0}, + {"id": "event2", "name": "event2", "type": "events", "order": 0}, + {"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}, + ] + } + ) diff --git a/posthog/cdp/templates/hog_function_template.py b/posthog/cdp/templates/hog_function_template.py index d01a0a30212eb..9bfc7faf7cb13 100644 --- a/posthog/cdp/templates/hog_function_template.py +++ b/posthog/cdp/templates/hog_function_template.py @@ -1,5 +1,10 @@ import dataclasses -from typing import Literal, Optional +from typing import Literal, Optional, TYPE_CHECKING + +if TYPE_CHECKING: + from posthog.models.plugin import PluginConfig +else: + PluginConfig = None @dataclasses.dataclass(frozen=True) @@ -12,3 +17,12 @@ class HogFunctionTemplate: inputs_schema: list[dict] filters: Optional[dict] = None icon_url: Optional[str] = None + + +class HogFunctionTemplateMigrator: + plugin_url: str + + @classmethod + def migrate(cls, obj: PluginConfig) -> dict: + # Return a dict for the template of a new HogFunction + raise NotImplementedError() diff --git a/posthog/cdp/templates/test_cdp_templates.py b/posthog/cdp/templates/test_cdp_templates.py index 21d5d841b3a9d..889f7431e33a1 100644 --- a/posthog/cdp/templates/test_cdp_templates.py +++ b/posthog/cdp/templates/test_cdp_templates.py @@ -10,5 +10,5 @@ def setUp(self): def test_templates_are_valid(self): for template in HOG_FUNCTION_TEMPLATES: bytecode = compile_hog(template.hog) - assert bytecode[0] == "_h" + assert bytecode[0] == "_H" assert validate_inputs_schema(template.inputs_schema) diff --git a/posthog/cdp/test/test_filters.py b/posthog/cdp/test/test_filters.py index 9fc5fbcc2b15b..b8c79da81290e 100644 --- a/posthog/cdp/test/test_filters.py +++ b/posthog/cdp/test/test_filters.py @@ -1,6 +1,7 @@ import json from inline_snapshot import snapshot +from hogvm.python.operation import HOGQL_BYTECODE_VERSION from posthog.cdp.filters import hog_function_filters_to_expr from posthog.hogql.bytecode import create_bytecode from posthog.models.action.action import Action @@ -63,12 +64,35 @@ def filters_to_bytecode(self, filters: dict): return json.loads(json.dumps(create_bytecode(res))) def test_filters_empty(self): - assert self.filters_to_bytecode(filters={}) == snapshot(["_h", 29]) + assert self.filters_to_bytecode(filters={}) == snapshot(["_H", HOGQL_BYTECODE_VERSION, 29]) + + def test_filters_all_events(self): + assert self.filters_to_bytecode( + filters={ + "events": [ + { + "id": None, + "name": "All events", + "type": "events", + "order": 0, + } + ] + } + ) == 
snapshot(["_H", HOGQL_BYTECODE_VERSION, 29, 3, 0, 4, 2]) def test_filters_events(self): - assert self.filters_to_bytecode(filters={"events": self.filters["events"]}) == snapshot( + bytecode = self.filters_to_bytecode(filters={"events": self.filters["events"]}) + assert bytecode == snapshot( [ - "_h", + "_H", + HOGQL_BYTECODE_VERSION, + 32, + "$pageview", + 32, + "event", + 1, + 1, + 11, 32, "%docs%", 32, @@ -78,13 +102,6 @@ def test_filters_events(self): 1, 2, 18, - 32, - "$pageview", - 32, - "event", - 1, - 1, - 11, 3, 2, 4, @@ -93,9 +110,18 @@ def test_filters_events(self): ) def test_filters_actions(self): - assert self.filters_to_bytecode(filters={"actions": self.filters["actions"]}) == snapshot( + bytecode = self.filters_to_bytecode(filters={"actions": self.filters["actions"]}) + assert bytecode == snapshot( [ - "_h", + "_H", + HOGQL_BYTECODE_VERSION, + 32, + "$pageview", + 32, + "event", + 1, + 1, + 11, 32, "%docs%", 32, @@ -105,13 +131,6 @@ def test_filters_actions(self): 1, 2, 17, - 32, - "$pageview", - 32, - "event", - 1, - 1, - 11, 3, 2, 3, @@ -124,67 +143,52 @@ def test_filters_actions(self): def test_filters_properties(self): assert self.filters_to_bytecode(filters={"properties": self.filters["properties"]}) == snapshot( [ - "_h", + "_H", + HOGQL_BYTECODE_VERSION, 32, - "ben", + "%@posthog.com%", 32, - "name", + "email", 32, "properties", 32, "person", 1, 3, - 11, + 18, 32, - "%@posthog.com%", + "ben", 32, - "email", + "name", 32, "properties", 32, "person", 1, 3, - 18, + 11, 3, 2, ] ) def test_filters_full(self): - assert self.filters_to_bytecode(filters=self.filters) == snapshot( + bytecode = self.filters_to_bytecode(filters=self.filters) + assert bytecode == snapshot( [ - "_h", - 32, - "%docs%", - 32, - "$current_url", + "_H", + HOGQL_BYTECODE_VERSION, 32, - "properties", - 1, - 2, - 17, - 32, - "$pageview", - 32, - "event", - 1, - 1, - 11, - 3, - 2, - 32, - "ben", + "%@posthog.com%", 32, - "name", + "email", 32, "properties", 32, "person", 1, 3, - 11, + 20, 32, "%@posthog.com%", 32, @@ -197,18 +201,23 @@ def test_filters_full(self): 3, 18, 32, - "%@posthog.com%", + "ben", 32, - "email", + "name", 32, "properties", 32, "person", 1, 3, - 20, - 3, - 4, + 11, + 32, + "$pageview", + 32, + "event", + 1, + 1, + 11, 32, "%docs%", 32, @@ -218,24 +227,19 @@ def test_filters_full(self): 1, 2, 18, + 3, + 5, 32, - "$pageview", - 32, - "event", - 1, - 1, - 11, - 32, - "ben", + "%@posthog.com%", 32, - "name", + "email", 32, "properties", 32, "person", 1, 3, - 11, + 20, 32, "%@posthog.com%", 32, @@ -248,18 +252,36 @@ def test_filters_full(self): 3, 18, 32, - "%@posthog.com%", + "ben", 32, - "email", + "name", 32, "properties", 32, "person", 1, 3, - 20, + 11, + 32, + "$pageview", + 32, + "event", + 1, + 1, + 11, + 32, + "%docs%", + 32, + "$current_url", + 32, + "properties", + 1, + 2, + 17, 3, - 5, + 2, + 3, + 4, 4, 2, ] diff --git a/posthog/cdp/test/test_validation.py b/posthog/cdp/test/test_validation.py index 90428c9dd574a..90a41f8cca653 100644 --- a/posthog/cdp/test/test_validation.py +++ b/posthog/cdp/test/test_validation.py @@ -2,6 +2,7 @@ from inline_snapshot import snapshot +from hogvm.python.operation import HOGQL_BYTECODE_VERSION from posthog.cdp.validation import validate_inputs, validate_inputs_schema from posthog.test.base import APIBaseTest, ClickhouseTestMixin, QueryMatchingTest @@ -78,7 +79,12 @@ def test_validate_inputs(self): { "url": { "value": "http://localhost:2080/0e02d917-563f-4050-9725-aad881b69937", - "bytecode": ["_h", 32, 
"http://localhost:2080/0e02d917-563f-4050-9725-aad881b69937"], + "bytecode": [ + "_H", + HOGQL_BYTECODE_VERSION, + 32, + "http://localhost:2080/0e02d917-563f-4050-9725-aad881b69937", + ], }, "payload": { "value": { @@ -89,11 +95,25 @@ def test_validate_inputs(self): "event_url": "{f'{event.url}-test'}", }, "bytecode": { - "event": ["_h", 32, "event", 1, 1], - "groups": ["_h", 32, "groups", 1, 1], - "nested": {"foo": ["_h", 32, "url", 32, "event", 1, 2]}, - "person": ["_h", 32, "person", 1, 1], - "event_url": ["_h", 32, "-test", 32, "url", 32, "event", 1, 2, 2, "concat", 2], + "event": ["_H", HOGQL_BYTECODE_VERSION, 32, "event", 1, 1], + "groups": ["_H", HOGQL_BYTECODE_VERSION, 32, "groups", 1, 1], + "nested": {"foo": ["_H", HOGQL_BYTECODE_VERSION, 32, "url", 32, "event", 1, 2]}, + "person": ["_H", HOGQL_BYTECODE_VERSION, 32, "person", 1, 1], + "event_url": [ + "_H", + HOGQL_BYTECODE_VERSION, + 32, + "url", + 32, + "event", + 1, + 2, + 32, + "-test", + 2, + "concat", + 2, + ], }, }, "method": {"value": "POST"}, @@ -101,7 +121,10 @@ def test_validate_inputs(self): "value": {"version": "v={event.properties.$lib_version}"}, "bytecode": { "version": [ - "_h", + "_H", + HOGQL_BYTECODE_VERSION, + 32, + "v=", 32, "$lib_version", 32, @@ -110,8 +133,6 @@ def test_validate_inputs(self): "event", 1, 3, - 32, - "v=", 2, "concat", 2, @@ -140,9 +161,10 @@ def test_validate_inputs_creates_bytecode_for_html(self): { "html": { "bytecode": [ - "_h", + "_H", + HOGQL_BYTECODE_VERSION, 32, - "
\n\n", + '\n\n\n\n\n\n
Hi ', 32, "email", 32, @@ -152,7 +174,7 @@ 1, 3, 32, - '\n\n\n\n\n\n
Hi ', + "
\n\n", 2, "concat", 3, diff --git a/posthog/clickhouse/client/execute.py b/posthog/clickhouse/client/execute.py index 539ddfe10f151..d1423baa09a20 100644 --- a/posthog/clickhouse/client/execute.py +++ b/posthog/clickhouse/client/execute.py @@ -18,6 +18,7 @@ from posthog.settings import TEST from posthog.utils import generate_short_id, patchable from prometheus_client import Counter, Gauge +from sentry_sdk import set_tag QUERY_ERROR_COUNTER = Counter( "clickhouse_query_failure", @@ -28,6 +29,7 @@ QUERY_EXECUTION_TIME_GAUGE = Gauge( "clickhouse_query_execution_time", "Clickhouse query execution time", + labelnames=["query_type"], ) InsertParams = Union[list, tuple, types.GeneratorType] @@ -125,10 +127,15 @@ def sync_execute( query_id = validated_client_query_id() core_settings = {**default_settings(), **(settings or {})} tags["query_settings"] = core_settings + + query_type = tags.get("query_type", "Other") + set_tag("query_type", query_type) + settings = { **core_settings, "log_comment": json.dumps(tags, separators=(",", ":")), } + try: result = client.execute( prepared_sql, @@ -145,7 +152,7 @@ def sync_execute( finally: execution_time = perf_counter() - start_time - QUERY_EXECUTION_TIME_GAUGE.set(execution_time * 1000.0) + QUERY_EXECUTION_TIME_GAUGE.labels(query_type=query_type).set(execution_time * 1000.0) if query_counter := getattr(thread_local_storage, "query_counter", None): query_counter.total_query_time += execution_time diff --git a/posthog/clickhouse/migrations/0078_add_soft_delete_column_on_events.py b/posthog/clickhouse/migrations/0078_add_soft_delete_column_on_events.py new file mode 100644 index 0000000000000..c64810443c9b6 --- /dev/null +++ b/posthog/clickhouse/migrations/0078_add_soft_delete_column_on_events.py @@ -0,0 +1,34 @@ +from infi.clickhouse_orm import migrations + +from posthog.clickhouse.client.connection import ch_pool +from posthog.settings import CLICKHOUSE_CLUSTER + + +DROP_COLUMNS_EVENTS = """ +ALTER TABLE {table} ON CLUSTER {cluster} +DROP COLUMN IF EXISTS is_deleted +""" + +ADD_COLUMNS_EVENTS = """ +ALTER TABLE {table} ON CLUSTER {cluster} +ADD COLUMN IF NOT EXISTS is_deleted Boolean +""" + +ADD_COLUMNS_INDEX_EVENTS = """ +ALTER TABLE {table} ON CLUSTER {cluster} +ADD INDEX IF NOT EXISTS is_deleted_idx (is_deleted) TYPE minmax GRANULARITY 1 +""" + + +def add_columns_to_required_tables(_): + with ch_pool.get_client() as client: + client.execute(DROP_COLUMNS_EVENTS.format(table="sharded_events", cluster=CLICKHOUSE_CLUSTER)) + client.execute(DROP_COLUMNS_EVENTS.format(table="events", cluster=CLICKHOUSE_CLUSTER)) + client.execute(ADD_COLUMNS_EVENTS.format(table="sharded_events", cluster=CLICKHOUSE_CLUSTER)) + client.execute(ADD_COLUMNS_EVENTS.format(table="events", cluster=CLICKHOUSE_CLUSTER)) + client.execute(ADD_COLUMNS_INDEX_EVENTS.format(table="sharded_events", cluster=CLICKHOUSE_CLUSTER)) + + +operations = [ + migrations.RunPython(add_columns_to_required_tables), +] diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr index fca49879f124c..c2b8416407fff 100644 --- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr +++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr @@ -1758,11 +1758,12 @@ autocapture_uniq AggregateFunction(uniq, Nullable(UUID)), screen_count SimpleAggregateFunction(sum, Int64), screen_uniq AggregateFunction(uniq, Nullable(UUID)), - -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 
of these - page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)), -- replay - maybe_has_session_replay SimpleAggregateFunction(max, Bool) -- will be written False to by the events table mv and True to by the replay table mv + maybe_has_session_replay SimpleAggregateFunction(max, Bool), -- will be written False to by the events table mv and True to by the replay table mv + + -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 of these + page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)) ) ENGINE = Distributed('posthog', 'posthog_test', 'sharded_raw_sessions', cityHash64(session_id_v7)) ''' @@ -1833,10 +1834,12 @@ uniqState(if(event='$autocapture', uuid, NULL)) as autocapture_uniq, sumIf(1, event='$screen') as screen_count, uniqState(if(event='$screen', uuid, NULL)) as screen_uniq, - uniqUpToState(1)(if(event='$pageview' OR event='$screen' OR event='$autocapture', uuid, NULL)) as page_screen_autocapture_uniq_up_to, -- replay - false as maybe_has_session_replay + false as maybe_has_session_replay, + + -- perf + uniqUpToState(1)(if(event='$pageview' OR event='$screen' OR event='$autocapture', uuid, NULL)) as page_screen_autocapture_uniq_up_to FROM posthog_test.sharded_events WHERE bitAnd(bitShiftRight(toUInt128(accurateCastOrNull(`$session_id`, 'UUID')), 76), 0xF) == 7 -- has a session id and is valid uuidv7) GROUP BY @@ -2422,11 +2425,12 @@ autocapture_uniq AggregateFunction(uniq, Nullable(UUID)), screen_count SimpleAggregateFunction(sum, Int64), screen_uniq AggregateFunction(uniq, Nullable(UUID)), - -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 of these - page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)), -- replay - maybe_has_session_replay SimpleAggregateFunction(max, Bool) -- will be written False to by the events table mv and True to by the replay table mv + maybe_has_session_replay SimpleAggregateFunction(max, Bool), -- will be written False to by the events table mv and True to by the replay table mv + + -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 of these + page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)) ) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.raw_sessions', '{replica}') PARTITION BY toYYYYMM(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(session_id_v7, 80)), 1000))) @@ -2723,11 +2727,12 @@ autocapture_uniq AggregateFunction(uniq, Nullable(UUID)), screen_count SimpleAggregateFunction(sum, Int64), screen_uniq AggregateFunction(uniq, Nullable(UUID)), - -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 of these - page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)), -- replay - maybe_has_session_replay SimpleAggregateFunction(max, Bool) -- will be written False to by the events table mv and True to by the replay table mv + maybe_has_session_replay SimpleAggregateFunction(max, Bool), -- will be written False to by the events table mv and True to by the replay table mv + + -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 of these + page_screen_autocapture_uniq_up_to 
AggregateFunction(uniqUpTo(1), Nullable(UUID)) ) ENGINE = Distributed('posthog', 'posthog_test', 'sharded_raw_sessions', cityHash64(session_id_v7)) ''' @@ -3533,11 +3538,12 @@ autocapture_uniq AggregateFunction(uniq, Nullable(UUID)), screen_count SimpleAggregateFunction(sum, Int64), screen_uniq AggregateFunction(uniq, Nullable(UUID)), - -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 of these - page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)), -- replay - maybe_has_session_replay SimpleAggregateFunction(max, Bool) -- will be written False to by the events table mv and True to by the replay table mv + maybe_has_session_replay SimpleAggregateFunction(max, Bool), -- will be written False to by the events table mv and True to by the replay table mv + + -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 of these + page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)) ) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.raw_sessions', '{replica}') PARTITION BY toYYYYMM(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(session_id_v7, 80)), 1000))) diff --git a/posthog/clickhouse/test/test_raw_sessions_model.py b/posthog/clickhouse/test/test_raw_sessions_model.py new file mode 100644 index 0000000000000..4bac5cfcd3f86 --- /dev/null +++ b/posthog/clickhouse/test/test_raw_sessions_model.py @@ -0,0 +1,27 @@ +from posthog.clickhouse.client import sync_execute +from posthog.models.raw_sessions.sql import RAW_SESSION_TABLE_BACKFILL_SELECT_SQL +from posthog.models.utils import uuid7 +from posthog.test.base import ( + _create_event, + ClickhouseTestMixin, + BaseTest, +) + + +class TestRawSessionsModel(ClickhouseTestMixin, BaseTest): + def test_backfill_sql(self): + distinct_id = str(uuid7()) + session_id = str(uuid7()) + _create_event( + team=self.team, + event="$pageview", + distinct_id=distinct_id, + properties={"$current_url": "/", "$session_id": session_id}, + timestamp="2024-03-08", + ) + + # just test that the backfill SQL can be run without error + sync_execute( + "INSERT INTO raw_sessions" + RAW_SESSION_TABLE_BACKFILL_SELECT_SQL() + "AND team_id = %(team_id)s", + {"team_id": self.team.id}, + ) diff --git a/posthog/hogql/ast.py b/posthog/hogql/ast.py index 575085ca1ed45..5758fbe891187 100644 --- a/posthog/hogql/ast.py +++ b/posthog/hogql/ast.py @@ -683,7 +683,7 @@ class Tuple(Expr): @dataclass(kw_only=True) class Lambda(Expr): args: list[str] - expr: Expr + expr: Expr | Block @dataclass(kw_only=True) @@ -718,6 +718,12 @@ class Call(Expr): distinct: bool = False +@dataclass(kw_only=True) +class ExprCall(Expr): + expr: Expr + args: list[Expr] + + @dataclass(kw_only=True) class JoinConstraint(Expr): expr: Expr diff --git a/posthog/hogql/bytecode.py b/posthog/hogql/bytecode.py index 36d4e47e9eb72..1d71ec173d372 100644 --- a/posthog/hogql/bytecode.py +++ b/posthog/hogql/bytecode.py @@ -14,6 +14,7 @@ from hogvm.python.operation import ( Operation, HOGQL_BYTECODE_IDENTIFIER, + HOGQL_BYTECODE_VERSION, ) from posthog.schema import HogQLNotice @@ -65,7 +66,8 @@ def create_bytecode( bytecode: list[Any] = [] if args is None: bytecode.append(HOGQL_BYTECODE_IDENTIFIER) - bytecode.extend(BytecodeBuilder(supported_functions, args, context).visit(expr)) + bytecode.append(HOGQL_BYTECODE_VERSION) + 
bytecode.extend(BytecodeCompiler(supported_functions, args, context).visit(expr)) return bytecode @@ -82,7 +84,7 @@ class HogFunction: bytecode: list[Any] -class BytecodeBuilder(Visitor): +class BytecodeCompiler(Visitor): def __init__( self, supported_functions: Optional[set[str]] = None, @@ -97,7 +99,7 @@ def __init__( self.args = args # we're in a function definition if args is not None: - for arg in reversed(args): + for arg in args: self._declare_local(arg) self.context = context or HogQLContext(team_id=None) @@ -126,7 +128,7 @@ def _declare_local(self, name: str) -> int: def visit_and(self, node: ast.And): response = [] - for expr in reversed(node.exprs): + for expr in node.exprs: response.extend(self.visit(expr)) response.append(Operation.AND) response.append(len(node.exprs)) @@ -134,7 +136,7 @@ def visit_and(self, node: ast.And): def visit_or(self, node: ast.Or): response = [] - for expr in reversed(node.exprs): + for expr in node.exprs: response.extend(self.visit(expr)) response.append(Operation.OR) response.append(len(node.exprs)) @@ -224,12 +226,12 @@ def visit_call(self, node: ast.Call): return [*self.visit(node.args[0]), Operation.NOT] if node.name == "and" and len(node.args) > 1: args = [] - for arg in reversed(node.args): + for arg in node.args: args.extend(self.visit(arg)) return [*args, Operation.AND, len(node.args)] if node.name == "or" and len(node.args) > 1: args = [] - for arg in reversed(node.args): + for arg in node.args: args.extend(self.visit(arg)) return [*args, Operation.OR, len(node.args)] if node.name == "if" and len(node.args) >= 2: @@ -278,16 +280,16 @@ def visit_call(self, node: ast.Call): ) response = [] + for expr in node.args: + response.extend(self.visit(expr)) + if node.name in MIN_ARGS_INCLUDING_OPTIONAL and len(node.args) < MIN_ARGS_INCLUDING_OPTIONAL[node.name]: for _ in range(len(node.args), MIN_ARGS_INCLUDING_OPTIONAL[node.name]): response.append(Operation.NULL) - for expr in reversed(node.args): - response.extend(self.visit(expr)) - response.extend( [ - Operation.CALL, + Operation.CALL_GLOBAL, node.name, len(node.args) if node.name not in MIN_ARGS_INCLUDING_OPTIONAL @@ -484,18 +486,18 @@ def visit_for_in_statement(self, node: ast.ForInStatement): if key_var is not None: expr_keys_local = self._declare_local("__H_keys_H__") # keys - response.extend([Operation.GET_LOCAL, expr_local, Operation.CALL, "keys", 1]) + response.extend([Operation.GET_LOCAL, expr_local, Operation.CALL_GLOBAL, "keys", 1]) else: expr_keys_local = None expr_values_local = self._declare_local("__H_values_H__") # values - response.extend([Operation.GET_LOCAL, expr_local, Operation.CALL, "values", 1]) + response.extend([Operation.GET_LOCAL, expr_local, Operation.CALL_GLOBAL, "values", 1]) loop_index_local = self._declare_local("__H_index_H__") # 0 response.extend([Operation.INTEGER, 1]) loop_limit_local = self._declare_local("__H_limit_H__") # length of keys - response.extend([Operation.GET_LOCAL, expr_values_local, Operation.CALL, "length", 1]) + response.extend([Operation.GET_LOCAL, expr_values_local, Operation.CALL_GLOBAL, "length", 1]) if key_var is not None: key_var_local = self._declare_local(key_var) # loop key @@ -668,5 +670,9 @@ def execute_hog( if not source_code.endswith(";"): source_code = f"{source_code};" program = parse_program(source_code) - bytecode = create_bytecode(program) + bytecode = create_bytecode( + program, + supported_functions=set(functions.keys()) if functions is not None else set(), + context=HogQLContext(team_id=team.id if team else None), + ) return 
execute_bytecode(bytecode, globals=globals, functions=functions, timeout=timeout, team=team) diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index 557cbfd05c01e..5e64111632997 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -92,6 +92,8 @@ def get_breakdown_limit_for_context(limit_context: LimitContext) -> int: class HogQLQuerySettings(BaseModel): model_config = ConfigDict(extra="forbid") optimize_aggregation_in_order: Optional[bool] = None + date_time_output_format: Optional[str] = None + date_time_input_format: Optional[str] = None # Settings applied on top of all HogQL queries. diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py index 32d54ca4feea0..3f07f8bc763a5 100644 --- a/posthog/hogql/database/database.py +++ b/posthog/hogql/database/database.py @@ -82,7 +82,6 @@ PersonsOnEventsMode, SessionTableVersion, ) -from posthog.utils import get_instance_region from posthog.warehouse.models.external_data_job import ExternalDataJob from posthog.warehouse.models.external_data_schema import ExternalDataSchema from posthog.warehouse.models.external_data_source import ExternalDataSource @@ -249,8 +248,9 @@ def create_hogql_database( join_function=join_with_persons_table, ) - if modifiers.sessionTableVersion == SessionTableVersion.V2 or ( - get_instance_region() == "EU" and modifiers.sessionTableVersion == SessionTableVersion.AUTO + if ( + modifiers.sessionTableVersion == SessionTableVersion.V2 + or modifiers.sessionTableVersion == SessionTableVersion.AUTO ): raw_sessions = RawSessionsTableV2() database.raw_sessions = raw_sessions diff --git a/posthog/hogql/database/schema/sessions_v2.py b/posthog/hogql/database/schema/sessions_v2.py index 65b9f964cd4d3..81af68a06f130 100644 --- a/posthog/hogql/database/schema/sessions_v2.py +++ b/posthog/hogql/database/schema/sessions_v2.py @@ -27,6 +27,7 @@ RAW_SELECT_SESSION_PROP_STRING_VALUES_SQL_WITH_FILTER, ) from posthog.queries.insight import insight_sync_execute +from posthog.schema import BounceRatePageViewMode if TYPE_CHECKING: from posthog.models.team import Team @@ -54,6 +55,7 @@ "autocapture_uniq": DatabaseField(name="autocapture_uniq"), "screen_uniq": DatabaseField(name="screen_uniq"), "last_external_click_url": StringDatabaseField(name="last_external_click_url"), + "page_screen_autocapture_uniq_up_to": DatabaseField(name="page_screen_autocapture_uniq_up_to"), } LAZY_SESSIONS_FIELDS: dict[str, FieldOrTable] = { @@ -90,6 +92,7 @@ ), # alias of $session_duration, deprecated but included for backwards compatibility "$is_bounce": BooleanDatabaseField(name="$is_bounce"), "$last_external_click_url": StringDatabaseField(name="$last_external_click_url"), + "$page_screen_autocapture_count_up_to": DatabaseField(name="$$page_screen_autocapture_count_up_to"), } @@ -121,6 +124,7 @@ def avoid_asterisk_fields(self) -> list[str]: "autocapture_uniq", "screen_uniq", "last_external_click_url", + "page_screen_autocapture_uniq_up_to", ] @@ -204,6 +208,11 @@ def arg_max_merge_field(field_name: str) -> ast.Call: "$screen_count": ast.Call(name="uniqMerge", args=[ast.Field(chain=[table_name, "screen_uniq"])]), "$autocapture_count": ast.Call(name="uniqMerge", args=[ast.Field(chain=[table_name, "autocapture_uniq"])]), "$last_external_click_url": null_if_empty(arg_max_merge_field("last_external_click_url")), + "$page_screen_autocapture_count_up_to": ast.Call( + name="uniqUpToMerge", + params=[ast.Constant(value=1)], + args=[ast.Field(chain=[table_name, "page_screen_autocapture_uniq_up_to"])], + 
), } # Alias aggregate_fields["id"] = aggregate_fields["session_id"] @@ -231,36 +240,64 @@ def arg_max_merge_field(field_name: str) -> ast.Call: args=[aggregate_fields["$urls"]], ) - bounce_pageview_count = aggregate_fields["$pageview_count"] - aggregate_fields["$is_bounce"] = ast.Call( - name="if", - args=[ - # if pageview_count is 0, return NULL so it doesn't contribute towards the bounce rate either way - ast.Call(name="equals", args=[bounce_pageview_count, ast.Constant(value=0)]), - ast.Constant(value=None), - ast.Call( - name="not", - args=[ - ast.Call( - name="or", - args=[ - # if > 1 pageview, not a bounce - ast.Call(name="greater", args=[bounce_pageview_count, ast.Constant(value=1)]), - # if > 0 autocapture events, not a bounce - ast.Call( - name="greater", args=[aggregate_fields["$autocapture_count"], ast.Constant(value=0)] - ), - # if session duration >= 10 seconds, not a bounce - ast.Call( - name="greaterOrEquals", - args=[aggregate_fields["$session_duration"], ast.Constant(value=10)], - ), - ], - ) - ], - ), - ], - ) + if context.modifiers.bounceRatePageViewMode == BounceRatePageViewMode.UNIQ_PAGE_SCREEN_AUTOCAPTURES: + bounce_event_count = aggregate_fields["$page_screen_autocapture_count_up_to"] + aggregate_fields["$is_bounce"] = ast.Call( + name="if", + args=[ + # if the count is 0, return NULL, so it doesn't contribute towards the bounce rate either way + ast.Call(name="equals", args=[bounce_event_count, ast.Constant(value=0)]), + ast.Constant(value=None), + ast.Call( + name="not", + args=[ + ast.Call( + name="or", + args=[ + # if pageviews + autocaptures > 1, not a bounce + ast.Call(name="greater", args=[bounce_event_count, ast.Constant(value=1)]), + # if session duration >= 10 seconds, not a bounce + ast.Call( + name="greaterOrEquals", + args=[aggregate_fields["$session_duration"], ast.Constant(value=10)], + ), + ], + ) + ], + ), + ], + ) + else: + bounce_pageview_count = aggregate_fields["$pageview_count"] + aggregate_fields["$is_bounce"] = ast.Call( + name="if", + args=[ + # if pageview_count is 0, return NULL so it doesn't contribute towards the bounce rate either way + ast.Call(name="equals", args=[bounce_pageview_count, ast.Constant(value=0)]), + ast.Constant(value=None), + ast.Call( + name="not", + args=[ + ast.Call( + name="or", + args=[ + # if > 1 pageview, not a bounce + ast.Call(name="greater", args=[bounce_pageview_count, ast.Constant(value=1)]), + # if > 0 autocapture events, not a bounce + ast.Call( + name="greater", args=[aggregate_fields["$autocapture_count"], ast.Constant(value=0)] + ), + # if session duration >= 10 seconds, not a bounce + ast.Call( + name="greaterOrEquals", + args=[aggregate_fields["$session_duration"], ast.Constant(value=10)], + ), + ], + ) + ], + ), + ], + ) aggregate_fields["$channel_type"] = create_channel_type_expr( campaign=aggregate_fields["$entry_utm_campaign"], medium=aggregate_fields["$entry_utm_medium"], @@ -358,6 +395,7 @@ def get_lazy_session_table_properties_v2(search: Optional[str]): "$urls", "duration", "$num_uniq_urls", + "$page_screen_autocapture_count_up_to", } # some fields should have a specific property type which isn't derivable from the type of database field diff --git a/posthog/hogql/database/schema/test/__snapshots__/test_session_replay_events.ambr b/posthog/hogql/database/schema/test/__snapshots__/test_session_replay_events.ambr index 95d6d8919921d..b896809bf58c2 100644 --- a/posthog/hogql/database/schema/test/__snapshots__/test_session_replay_events.ambr +++ 
b/posthog/hogql/database/schema/test/__snapshots__/test_session_replay_events.ambr @@ -333,12 +333,12 @@ SELECT DISTINCT session_replay_events.session_id AS session_id FROM session_replay_events LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS duration, - sessions.session_id AS session_id - FROM sessions - WHERE equals(sessions.team_id, 2) - GROUP BY sessions.session_id, - sessions.session_id) AS raw_session_replay_events__session ON equals(session_replay_events.session_id, raw_session_replay_events__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS duration, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE equals(raw_sessions.team_id, 2) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS raw_session_replay_events__session ON equals(toUInt128(accurateCastOrNull(session_replay_events.session_id, 'UUID')), raw_session_replay_events__session.session_id_v7) WHERE and(equals(session_replay_events.team_id, 2), ifNull(greater(raw_session_replay_events__session.duration, 3600), 0)) GROUP BY session_replay_events.session_id HAVING ifNull(equals(dateDiff('second', min(toTimeZone(session_replay_events.min_first_timestamp, 'UTC')), max(toTimeZone(session_replay_events.max_last_timestamp, 'UTC'))), 3600), 0) diff --git a/posthog/hogql/database/schema/test/test_sessions_v2.py b/posthog/hogql/database/schema/test/test_sessions_v2.py index 28cd62625df2d..59964b1a478bf 100644 --- a/posthog/hogql/database/schema/test/test_sessions_v2.py +++ b/posthog/hogql/database/schema/test/test_sessions_v2.py @@ -2,6 +2,7 @@ from time import time_ns import pytest +from parameterized import parameterized from posthog.hogql import ast from posthog.hogql.database.schema.sessions_v2 import ( @@ -12,13 +13,20 @@ from posthog.hogql.query import execute_hogql_query from posthog.models.property_definition import PropertyType from posthog.models.utils import uuid7 -from posthog.schema import HogQLQueryModifiers, SessionTableVersion -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person +from posthog.schema import HogQLQueryModifiers, SessionTableVersion, BounceRatePageViewMode +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, +) class TestSessionsV2(ClickhouseTestMixin, APIBaseTest): - def __execute(self, query): - modifiers = HogQLQueryModifiers(sessionTableVersion=SessionTableVersion.V2) + def __execute(self, query, bounce_rate_mode=BounceRatePageViewMode.COUNT_PAGEVIEWS): + modifiers = HogQLQueryModifiers( + sessionTableVersion=SessionTableVersion.V2, bounceRatePageViewMode=bounce_rate_mode + ) return execute_hogql_query( query=query, team=self.team, @@ -309,7 +317,105 @@ def test_idempotent_event_counts(self): ) self.assertEqual(response.results or [], [(1, 1, 1)]) - def test_bounce_rate(self): + def test_page_screen_autocapture_count_up_to(self): + time = time_ns() // (10**6) + + # two pageviews + s1 = str(uuid7(time)) + _create_event( + event="$pageview", + team=self.team, + distinct_id=s1, + properties={"$session_id": s1}, + timestamp="2023-12-02", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=s1, + properties={"$session_id": s1}, + timestamp="2023-12-03", + ) + # one pageview and one autocapture + s2 = str(uuid7(time + 2)) + _create_event( + event="$pageview", + 
team=self.team, + distinct_id=s1, + properties={"$session_id": s2}, + timestamp="2023-12-02", + ) + _create_event( + event="$autocapture", + team=self.team, + distinct_id=s1, + properties={"$session_id": s2}, + timestamp="2023-12-03", + ) + # one pageview + s3 = str(uuid7(time + 3)) + _create_event( + event="$pageview", + team=self.team, + distinct_id=s1, + properties={"$session_id": s3}, + timestamp="2023-12-02", + ) + # three pageviews (should still count as 2) + s4 = str(uuid7(time + 4)) + _create_event( + event="$pageview", + team=self.team, + distinct_id=s1, + properties={"$session_id": s4}, + timestamp="2023-12-02", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=s1, + properties={"$session_id": s4}, + timestamp="2023-12-02", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=s1, + properties={"$session_id": s4}, + timestamp="2023-12-02", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=s1, + properties={"$session_id": s4}, + timestamp="2023-12-02", + ) + # one screen + s5 = str(uuid7(time + 5)) + _create_event( + event="$screen", + team=self.team, + distinct_id=s1, + properties={"$session_id": s5}, + timestamp="2023-12-02", + ) + + results = ( + self.__execute( + parse_select( + "select $page_screen_autocapture_count_up_to from sessions ORDER BY session_id", + placeholders={"session_id": ast.Constant(value=s1)}, + ), + ).results + or [] + ) + assert results == [(2,), (2,), (1,), (2,), (1,)] + + @parameterized.expand( + [[BounceRatePageViewMode.UNIQ_PAGE_SCREEN_AUTOCAPTURES], [BounceRatePageViewMode.COUNT_PAGEVIEWS]] + ) + def test_bounce_rate(self, bounce_rate_mode): time = time_ns() // (10**6) # ensure the sessions ids are sortable by giving them different time components s1a = str(uuid7(time)) @@ -398,18 +504,16 @@ def test_bounce_rate(self): parse_select( "select $is_bounce, session_id from sessions ORDER BY session_id", ), - ) - self.assertEqual( - [ - (0, s1a), - (1, s1b), - (1, s2), - (0, s3), - (1, s4), - (0, s5), - ], - response.results or [], - ) + bounce_rate_mode=bounce_rate_mode, + ) + assert (response.results or []) == [ + (0, s1a), + (1, s1b), + (1, s2), + (0, s3), + (1, s4), + (0, s5), + ] def test_last_external_click_url(self): s1 = str(uuid7()) diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr index 77a2452922e7e..6e4834dda1031 100644 --- a/posthog/hogql/database/test/__snapshots__/test_database.ambr +++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr @@ -346,6 +346,7 @@ "fields": [ "id", "session_id", + "session_id_v7", "team_id", "distinct_id", "$start_timestamp", @@ -354,8 +355,8 @@ "$num_uniq_urls", "$entry_current_url", "$entry_pathname", - "$exit_current_url", - "$exit_pathname", + "$end_current_url", + "$end_pathname", "$entry_utm_source", "$entry_utm_campaign", "$entry_utm_medium", @@ -364,13 +365,15 @@ "$entry_referring_domain", "$entry_gclid", "$entry_gad_source", - "$event_count_map", "$pageview_count", "$autocapture_count", + "$screen_count", "$channel_type", "$session_duration", "duration", - "$is_bounce" + "$is_bounce", + "$last_external_click_url", + "$page_screen_autocapture_count_up_to" ], "hogql_value": "session", "name": "session", @@ -832,6 +835,7 @@ "fields": [ "id", "session_id", + "session_id_v7", "team_id", "distinct_id", "$start_timestamp", @@ -840,8 +844,8 @@ "$num_uniq_urls", "$entry_current_url", "$entry_pathname", - "$exit_current_url", - "$exit_pathname", + 
"$end_current_url", + "$end_pathname", "$entry_utm_source", "$entry_utm_campaign", "$entry_utm_medium", @@ -850,13 +854,15 @@ "$entry_referring_domain", "$entry_gclid", "$entry_gad_source", - "$event_count_map", "$pageview_count", "$autocapture_count", + "$screen_count", "$channel_type", "$session_duration", "duration", - "$is_bounce" + "$is_bounce", + "$last_external_click_url", + "$page_screen_autocapture_count_up_to" ], "hogql_value": "session", "name": "session", @@ -1054,6 +1060,15 @@ "table": null, "type": "string" }, + "session_id_v7": { + "chain": null, + "fields": null, + "hogql_value": "session_id_v7", + "name": "session_id_v7", + "schema_valid": true, + "table": null, + "type": "integer" + }, "distinct_id": { "chain": null, "fields": null, @@ -1117,20 +1132,20 @@ "table": null, "type": "string" }, - "$exit_current_url": { + "$end_current_url": { "chain": null, "fields": null, - "hogql_value": "`$exit_current_url`", - "name": "$exit_current_url", + "hogql_value": "`$end_current_url`", + "name": "$end_current_url", "schema_valid": true, "table": null, "type": "string" }, - "$exit_pathname": { + "$end_pathname": { "chain": null, "fields": null, - "hogql_value": "`$exit_pathname`", - "name": "$exit_pathname", + "hogql_value": "`$end_pathname`", + "name": "$end_pathname", "schema_valid": true, "table": null, "type": "string" @@ -1225,6 +1240,15 @@ "table": null, "type": "integer" }, + "$screen_count": { + "chain": null, + "fields": null, + "hogql_value": "`$screen_count`", + "name": "$screen_count", + "schema_valid": true, + "table": null, + "type": "integer" + }, "$channel_type": { "chain": null, "fields": null, @@ -1260,6 +1284,15 @@ "schema_valid": true, "table": null, "type": "boolean" + }, + "$last_external_click_url": { + "chain": null, + "fields": null, + "hogql_value": "`$last_external_click_url`", + "name": "$last_external_click_url", + "schema_valid": true, + "table": null, + "type": "string" } }, "id": "sessions", @@ -1703,6 +1736,7 @@ "fields": [ "id", "session_id", + "session_id_v7", "team_id", "distinct_id", "$start_timestamp", @@ -1711,8 +1745,8 @@ "$num_uniq_urls", "$entry_current_url", "$entry_pathname", - "$exit_current_url", - "$exit_pathname", + "$end_current_url", + "$end_pathname", "$entry_utm_source", "$entry_utm_campaign", "$entry_utm_medium", @@ -1721,13 +1755,15 @@ "$entry_referring_domain", "$entry_gclid", "$entry_gad_source", - "$event_count_map", "$pageview_count", "$autocapture_count", + "$screen_count", "$channel_type", "$session_duration", "duration", - "$is_bounce" + "$is_bounce", + "$last_external_click_url", + "$page_screen_autocapture_count_up_to" ], "hogql_value": "session", "name": "session", @@ -2189,6 +2225,7 @@ "fields": [ "id", "session_id", + "session_id_v7", "team_id", "distinct_id", "$start_timestamp", @@ -2197,8 +2234,8 @@ "$num_uniq_urls", "$entry_current_url", "$entry_pathname", - "$exit_current_url", - "$exit_pathname", + "$end_current_url", + "$end_pathname", "$entry_utm_source", "$entry_utm_campaign", "$entry_utm_medium", @@ -2207,13 +2244,15 @@ "$entry_referring_domain", "$entry_gclid", "$entry_gad_source", - "$event_count_map", "$pageview_count", "$autocapture_count", + "$screen_count", "$channel_type", "$session_duration", "duration", - "$is_bounce" + "$is_bounce", + "$last_external_click_url", + "$page_screen_autocapture_count_up_to" ], "hogql_value": "session", "name": "session", @@ -2411,6 +2450,15 @@ "table": null, "type": "string" }, + "session_id_v7": { + "chain": null, + "fields": null, + "hogql_value": "session_id_v7", + 
"name": "session_id_v7", + "schema_valid": true, + "table": null, + "type": "integer" + }, "distinct_id": { "chain": null, "fields": null, @@ -2474,20 +2522,20 @@ "table": null, "type": "string" }, - "$exit_current_url": { + "$end_current_url": { "chain": null, "fields": null, - "hogql_value": "`$exit_current_url`", - "name": "$exit_current_url", + "hogql_value": "`$end_current_url`", + "name": "$end_current_url", "schema_valid": true, "table": null, "type": "string" }, - "$exit_pathname": { + "$end_pathname": { "chain": null, "fields": null, - "hogql_value": "`$exit_pathname`", - "name": "$exit_pathname", + "hogql_value": "`$end_pathname`", + "name": "$end_pathname", "schema_valid": true, "table": null, "type": "string" @@ -2582,6 +2630,15 @@ "table": null, "type": "integer" }, + "$screen_count": { + "chain": null, + "fields": null, + "hogql_value": "`$screen_count`", + "name": "$screen_count", + "schema_valid": true, + "table": null, + "type": "integer" + }, "$channel_type": { "chain": null, "fields": null, @@ -2617,6 +2674,15 @@ "schema_valid": true, "table": null, "type": "boolean" + }, + "$last_external_click_url": { + "chain": null, + "fields": null, + "hogql_value": "`$last_external_click_url`", + "name": "$last_external_click_url", + "schema_valid": true, + "table": null, + "type": "string" } }, "id": "sessions", diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index f1460cfcd9f5f..586eed0c4a274 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -838,6 +838,9 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy "aggregate_funnel": HogQLFunctionMeta("aggregate_funnel", 6, 6, aggregate=False), "aggregate_funnel_array": HogQLFunctionMeta("aggregate_funnel_array", 6, 6, aggregate=False), "aggregate_funnel_cohort": HogQLFunctionMeta("aggregate_funnel_cohort", 6, 6, aggregate=False), + "aggregate_funnel_trends": HogQLFunctionMeta("aggregate_funnel_trends", 7, 7, aggregate=False), + "aggregate_funnel_array_trends": HogQLFunctionMeta("aggregate_funnel_array_trends", 7, 7, aggregate=False), + "aggregate_funnel_cohort_trends": HogQLFunctionMeta("aggregate_funnel_cohort_trends", 7, 7, aggregate=False), "aggregate_funnel_test": HogQLFunctionMeta("aggregate_funnel_test", 6, 6, aggregate=False), } # Permitted HogQL aggregations @@ -949,7 +952,8 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy "uniqHLL12If": HogQLFunctionMeta("uniqHLL12If", 2, None, aggregate=True), "uniqTheta": HogQLFunctionMeta("uniqTheta", 1, None, aggregate=True), "uniqThetaIf": HogQLFunctionMeta("uniqThetaIf", 2, None, aggregate=True), - "uniqMerge": HogQLFunctionMeta("uniqMerge", 1, None, aggregate=True), + "uniqMerge": HogQLFunctionMeta("uniqMerge", 1, 1, aggregate=True), + "uniqUpToMerge": HogQLFunctionMeta("uniqUpToMerge", 1, 1, 1, 1, aggregate=True), "median": HogQLFunctionMeta("median", 1, 1, aggregate=True), "medianIf": HogQLFunctionMeta("medianIf", 2, 2, aggregate=True), "medianExact": HogQLFunctionMeta("medianExact", 1, 1, aggregate=True), diff --git a/posthog/hogql/grammar/HogQLLexer.py b/posthog/hogql/grammar/HogQLLexer.py index 1ce68bc24a643..4647d13b29108 100644 --- a/posthog/hogql/grammar/HogQLLexer.py +++ b/posthog/hogql/grammar/HogQLLexer.py @@ -1,4 +1,4 @@ -# Generated from HogQLLexer.g4 by ANTLR 4.13.1 +# Generated from HogQLLexer.g4 by ANTLR 4.13.2 from antlr4 import * from io import StringIO import sys @@ -830,7 +830,7 @@ class HogQLLexer(Lexer): def __init__(self, 
input=None, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.13.1") + self.checkVersion("4.13.2") self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None diff --git a/posthog/hogql/grammar/HogQLParser.g4 b/posthog/hogql/grammar/HogQLParser.g4 index 4a6e187643877..6e86a6f86afd9 100644 --- a/posthog/hogql/grammar/HogQLParser.g4 +++ b/posthog/hogql/grammar/HogQLParser.g4 @@ -151,9 +151,10 @@ columnExpr | SUBSTRING LPAREN columnExpr FROM columnExpr (FOR columnExpr)? RPAREN # ColumnExprSubstring | TIMESTAMP STRING_LITERAL # ColumnExprTimestamp | TRIM LPAREN (BOTH | LEADING | TRAILING) string FROM columnExpr RPAREN # ColumnExprTrim - | identifier (LPAREN columnExprList? RPAREN) (LPAREN DISTINCT? columnArgList? RPAREN)? OVER LPAREN windowExpr RPAREN # ColumnExprWinFunction - | identifier (LPAREN columnExprList? RPAREN) (LPAREN DISTINCT? columnArgList? RPAREN)? OVER identifier # ColumnExprWinFunctionTarget - | identifier (LPAREN columnExprList? RPAREN)? LPAREN DISTINCT? columnArgList? RPAREN # ColumnExprFunction + | identifier (LPAREN columnExprs=columnExprList? RPAREN) (LPAREN DISTINCT? columnArgList=columnExprList? RPAREN)? OVER LPAREN windowExpr RPAREN # ColumnExprWinFunction + | identifier (LPAREN columnExprs=columnExprList? RPAREN) (LPAREN DISTINCT? columnArgList=columnExprList? RPAREN)? OVER identifier # ColumnExprWinFunctionTarget + | identifier (LPAREN columnExprs=columnExprList? RPAREN)? LPAREN DISTINCT? columnArgList=columnExprList? RPAREN # ColumnExprFunction + | columnExpr LPAREN columnExprList? RPAREN # ColumnExprCall | hogqlxTagElement # ColumnExprTagElement | templateString # ColumnExprTemplateString | literal # ColumnExprLiteral @@ -198,24 +199,23 @@ columnExpr // TODO(ilezhankin): `BETWEEN a AND b AND c` is parsed in a wrong way: `BETWEEN (a AND b) AND c` | columnExpr NOT? BETWEEN columnExpr AND columnExpr # ColumnExprBetween | columnExpr QUERY columnExpr COLON columnExpr # ColumnExprTernaryOp - | columnExpr (AS identifier | AS STRING_LITERAL) # ColumnExprAlias - + | columnExpr (AS identifier | AS STRING_LITERAL) # ColumnExprAlias | (tableIdentifier DOT)? ASTERISK # ColumnExprAsterisk // single-column only | LPAREN selectUnionStmt RPAREN # ColumnExprSubquery // single-column only | LPAREN columnExpr RPAREN # ColumnExprParens // single-column only | LPAREN columnExprList RPAREN # ColumnExprTuple | LBRACKET columnExprList? RBRACKET # ColumnExprArray | LBRACE (kvPairList)? RBRACE # ColumnExprDict + | columnLambdaExpr # ColumnExprLambda | columnIdentifier # ColumnExprIdentifier ; -columnArgList: columnArgExpr (COMMA columnArgExpr)* COMMA?; -columnArgExpr: columnLambdaExpr | columnExpr; columnLambdaExpr: ( LPAREN identifier (COMMA identifier)* COMMA? RPAREN | identifier (COMMA identifier)* COMMA? 
+ | LPAREN RPAREN ) - ARROW columnExpr + ARROW (columnExpr | block) ; diff --git a/posthog/hogql/grammar/HogQLParser.interp b/posthog/hogql/grammar/HogQLParser.interp index 183059ec0fd79..b965cfbb577c7 100644 --- a/posthog/hogql/grammar/HogQLParser.interp +++ b/posthog/hogql/grammar/HogQLParser.interp @@ -382,8 +382,6 @@ expr columnTypeExpr columnExprList columnExpr -columnArgList -columnArgExpr columnLambdaExpr hogqlxTagElement hogqlxTagAttribute @@ -414,4 +412,4 @@ stringContentsFull atn: -[4, 1, 159, 1311, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 1, 0, 5, 0, 178, 8, 0, 10, 0, 12, 0, 181, 9, 0, 1, 0, 1, 0, 1, 1, 1, 1, 3, 1, 187, 8, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 196, 8, 3, 1, 4, 1, 4, 1, 4, 5, 4, 201, 8, 4, 10, 4, 12, 4, 204, 9, 4, 1, 4, 3, 4, 207, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 221, 8, 5, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 6, 3, 6, 228, 8, 6, 1, 7, 1, 7, 3, 7, 232, 8, 7, 1, 7, 3, 7, 235, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 242, 8, 8, 1, 8, 1, 8, 3, 8, 246, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 253, 8, 9, 10, 9, 12, 9, 256, 9, 9, 1, 9, 1, 9, 3, 9, 260, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 269, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 277, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 284, 8, 12, 1, 12, 1, 12, 3, 12, 288, 8, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 294, 8, 12, 1, 12, 1, 12, 1, 12, 3, 12, 299, 8, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 307, 8, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 314, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 320, 8, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 3, 16, 332, 8, 16, 1, 17, 1, 17, 1, 18, 1, 18, 5, 18, 338, 8, 18, 10, 18, 12, 18, 341, 9, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 5, 20, 352, 8, 20, 10, 20, 12, 20, 355, 9, 20, 1, 20, 3, 20, 358, 8, 20, 1, 21, 1, 21, 1, 21, 3, 21, 363, 8, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 371, 8, 22, 10, 22, 12, 22, 374, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 382, 8, 23, 1, 24, 3, 24, 385, 8, 24, 1, 24, 1, 24, 3, 24, 389, 8, 24, 1, 24, 3, 24, 392, 8, 24, 1, 24, 1, 24, 3, 24, 396, 8, 24, 1, 24, 3, 24, 399, 8, 24, 1, 24, 3, 24, 402, 8, 24, 1, 24, 3, 24, 405, 8, 24, 1, 24, 3, 24, 408, 8, 24, 1, 24, 1, 24, 3, 24, 412, 8, 24, 1, 24, 1, 24, 3, 
24, 416, 8, 24, 1, 24, 3, 24, 419, 8, 24, 1, 24, 3, 24, 422, 8, 24, 1, 24, 3, 24, 425, 8, 24, 1, 24, 1, 24, 3, 24, 429, 8, 24, 1, 24, 3, 24, 432, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 441, 8, 26, 1, 27, 1, 27, 1, 27, 1, 28, 3, 28, 447, 8, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 466, 8, 29, 10, 29, 12, 29, 469, 9, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 485, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 502, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 508, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 514, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 525, 8, 36, 3, 36, 527, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 538, 8, 39, 1, 39, 3, 39, 541, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 547, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 555, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 561, 8, 39, 10, 39, 12, 39, 564, 9, 39, 1, 40, 3, 40, 567, 8, 40, 1, 40, 1, 40, 1, 40, 3, 40, 572, 8, 40, 1, 40, 3, 40, 575, 8, 40, 1, 40, 3, 40, 578, 8, 40, 1, 40, 1, 40, 3, 40, 582, 8, 40, 1, 40, 1, 40, 3, 40, 586, 8, 40, 1, 40, 3, 40, 589, 8, 40, 3, 40, 591, 8, 40, 1, 40, 3, 40, 594, 8, 40, 1, 40, 1, 40, 3, 40, 598, 8, 40, 1, 40, 1, 40, 3, 40, 602, 8, 40, 1, 40, 3, 40, 605, 8, 40, 3, 40, 607, 8, 40, 3, 40, 609, 8, 40, 1, 41, 1, 41, 1, 41, 3, 41, 614, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 625, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 3, 43, 631, 8, 43, 1, 44, 1, 44, 1, 44, 5, 44, 636, 8, 44, 10, 44, 12, 44, 639, 9, 44, 1, 45, 1, 45, 3, 45, 643, 8, 45, 1, 45, 1, 45, 3, 45, 647, 8, 45, 1, 45, 1, 45, 3, 45, 651, 8, 45, 1, 46, 1, 46, 1, 46, 1, 46, 3, 46, 657, 8, 46, 3, 46, 659, 8, 46, 1, 47, 1, 47, 1, 47, 5, 47, 664, 8, 47, 10, 47, 12, 47, 667, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 3, 49, 674, 8, 49, 1, 49, 3, 49, 677, 8, 49, 1, 49, 3, 49, 680, 8, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 699, 8, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 3, 54, 713, 8, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 727, 8, 56, 10, 56, 12, 56, 730, 9, 56, 1, 56, 3, 56, 733, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 742, 8, 56, 10, 56, 12, 56, 745, 9, 56, 1, 56, 3, 56, 748, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 757, 8, 56, 10, 56, 12, 56, 760, 9, 56, 1, 56, 3, 56, 763, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 770, 8, 56, 1, 56, 1, 56, 3, 56, 774, 8, 56, 1, 57, 1, 57, 1, 57, 5, 57, 779, 8, 57, 10, 57, 12, 57, 782, 9, 57, 1, 57, 3, 57, 785, 8, 57, 1, 58, 1, 58, 1, 58, 3, 58, 790, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 4, 58, 797, 8, 58, 11, 58, 12, 58, 798, 1, 58, 1, 58, 3, 58, 803, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 827, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 844, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 850, 8, 58, 1, 58, 3, 58, 853, 8, 58, 1, 58, 3, 58, 856, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 
866, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 872, 8, 58, 1, 58, 3, 58, 875, 8, 58, 1, 58, 3, 58, 878, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 886, 8, 58, 1, 58, 3, 58, 889, 8, 58, 1, 58, 1, 58, 3, 58, 893, 8, 58, 1, 58, 3, 58, 896, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 910, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 927, 8, 58, 1, 58, 1, 58, 1, 58, 3, 58, 932, 8, 58, 1, 58, 1, 58, 3, 58, 936, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 942, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 949, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 961, 8, 58, 1, 58, 1, 58, 3, 58, 965, 8, 58, 1, 58, 3, 58, 968, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 977, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 991, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1030, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1038, 8, 58, 5, 58, 1040, 8, 58, 10, 58, 12, 58, 1043, 9, 58, 1, 59, 1, 59, 1, 59, 5, 59, 1048, 8, 59, 10, 59, 12, 59, 1051, 9, 59, 1, 59, 3, 59, 1054, 8, 59, 1, 60, 1, 60, 3, 60, 1058, 8, 60, 1, 61, 1, 61, 1, 61, 1, 61, 5, 61, 1064, 8, 61, 10, 61, 12, 61, 1067, 9, 61, 1, 61, 3, 61, 1070, 8, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 5, 61, 1077, 8, 61, 10, 61, 12, 61, 1080, 9, 61, 1, 61, 3, 61, 1083, 8, 61, 3, 61, 1085, 8, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 5, 62, 1093, 8, 62, 10, 62, 12, 62, 1096, 9, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 5, 62, 1104, 8, 62, 10, 62, 12, 62, 1107, 9, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 3, 62, 1115, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 3, 62, 1122, 8, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 3, 63, 1135, 8, 63, 1, 64, 1, 64, 1, 64, 5, 64, 1140, 8, 64, 10, 64, 12, 64, 1143, 9, 64, 1, 64, 3, 64, 1146, 8, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 3, 65, 1158, 8, 65, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 1164, 8, 66, 1, 66, 3, 66, 1167, 8, 66, 1, 67, 1, 67, 1, 67, 5, 67, 1172, 8, 67, 10, 67, 12, 67, 1175, 9, 67, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 1, 68, 3, 68, 1186, 8, 68, 1, 68, 1, 68, 1, 68, 1, 68, 3, 68, 1192, 8, 68, 5, 68, 1194, 8, 68, 10, 68, 12, 68, 1197, 9, 68, 1, 69, 1, 69, 1, 69, 3, 69, 1202, 8, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 3, 70, 1209, 8, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 5, 71, 1216, 8, 71, 10, 71, 12, 71, 1219, 9, 71, 1, 71, 3, 71, 1222, 8, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 3, 73, 1232, 8, 73, 3, 73, 1234, 8, 73, 1, 74, 3, 74, 1237, 8, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 3, 74, 1245, 8, 74, 1, 75, 1, 75, 1, 75, 3, 75, 1250, 8, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 3, 79, 1260, 8, 79, 1, 80, 1, 80, 1, 80, 3, 80, 1265, 8, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 3, 83, 1277, 8, 83, 1, 84, 1, 84, 5, 84, 1281, 8, 84, 10, 84, 12, 84, 1284, 9, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 3, 85, 1293, 8, 85, 1, 86, 1, 86, 5, 86, 1297, 8, 86, 10, 86, 12, 86, 1300, 9, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 3, 87, 1309, 8, 87, 1, 87, 0, 
3, 78, 116, 136, 88, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 172, 174, 0, 16, 2, 0, 18, 18, 74, 74, 2, 0, 44, 44, 51, 51, 3, 0, 1, 1, 4, 4, 8, 8, 4, 0, 1, 1, 3, 4, 8, 8, 80, 80, 2, 0, 51, 51, 73, 73, 2, 0, 1, 1, 4, 4, 2, 0, 7, 7, 22, 23, 2, 0, 30, 30, 49, 49, 2, 0, 71, 71, 76, 76, 3, 0, 10, 10, 50, 50, 90, 90, 2, 0, 41, 41, 53, 53, 1, 0, 107, 108, 2, 0, 118, 118, 139, 139, 7, 0, 21, 21, 38, 38, 55, 56, 70, 70, 78, 78, 97, 97, 103, 103, 16, 0, 1, 13, 15, 20, 22, 28, 30, 30, 32, 37, 39, 42, 44, 51, 53, 54, 58, 58, 60, 69, 71, 77, 79, 83, 85, 92, 94, 96, 98, 99, 101, 102, 4, 0, 20, 20, 30, 30, 39, 39, 48, 48, 1479, 0, 179, 1, 0, 0, 0, 2, 186, 1, 0, 0, 0, 4, 188, 1, 0, 0, 0, 6, 190, 1, 0, 0, 0, 8, 197, 1, 0, 0, 0, 10, 220, 1, 0, 0, 0, 12, 222, 1, 0, 0, 0, 14, 229, 1, 0, 0, 0, 16, 236, 1, 0, 0, 0, 18, 249, 1, 0, 0, 0, 20, 261, 1, 0, 0, 0, 22, 270, 1, 0, 0, 0, 24, 278, 1, 0, 0, 0, 26, 300, 1, 0, 0, 0, 28, 315, 1, 0, 0, 0, 30, 324, 1, 0, 0, 0, 32, 329, 1, 0, 0, 0, 34, 333, 1, 0, 0, 0, 36, 335, 1, 0, 0, 0, 38, 344, 1, 0, 0, 0, 40, 348, 1, 0, 0, 0, 42, 362, 1, 0, 0, 0, 44, 366, 1, 0, 0, 0, 46, 381, 1, 0, 0, 0, 48, 384, 1, 0, 0, 0, 50, 433, 1, 0, 0, 0, 52, 436, 1, 0, 0, 0, 54, 442, 1, 0, 0, 0, 56, 446, 1, 0, 0, 0, 58, 452, 1, 0, 0, 0, 60, 470, 1, 0, 0, 0, 62, 473, 1, 0, 0, 0, 64, 476, 1, 0, 0, 0, 66, 486, 1, 0, 0, 0, 68, 489, 1, 0, 0, 0, 70, 493, 1, 0, 0, 0, 72, 526, 1, 0, 0, 0, 74, 528, 1, 0, 0, 0, 76, 531, 1, 0, 0, 0, 78, 546, 1, 0, 0, 0, 80, 608, 1, 0, 0, 0, 82, 613, 1, 0, 0, 0, 84, 624, 1, 0, 0, 0, 86, 626, 1, 0, 0, 0, 88, 632, 1, 0, 0, 0, 90, 640, 1, 0, 0, 0, 92, 658, 1, 0, 0, 0, 94, 660, 1, 0, 0, 0, 96, 668, 1, 0, 0, 0, 98, 673, 1, 0, 0, 0, 100, 681, 1, 0, 0, 0, 102, 685, 1, 0, 0, 0, 104, 689, 1, 0, 0, 0, 106, 698, 1, 0, 0, 0, 108, 712, 1, 0, 0, 0, 110, 714, 1, 0, 0, 0, 112, 773, 1, 0, 0, 0, 114, 775, 1, 0, 0, 0, 116, 935, 1, 0, 0, 0, 118, 1044, 1, 0, 0, 0, 120, 1057, 1, 0, 0, 0, 122, 1084, 1, 0, 0, 0, 124, 1121, 1, 0, 0, 0, 126, 1134, 1, 0, 0, 0, 128, 1136, 1, 0, 0, 0, 130, 1157, 1, 0, 0, 0, 132, 1166, 1, 0, 0, 0, 134, 1168, 1, 0, 0, 0, 136, 1185, 1, 0, 0, 0, 138, 1198, 1, 0, 0, 0, 140, 1208, 1, 0, 0, 0, 142, 1212, 1, 0, 0, 0, 144, 1223, 1, 0, 0, 0, 146, 1233, 1, 0, 0, 0, 148, 1236, 1, 0, 0, 0, 150, 1249, 1, 0, 0, 0, 152, 1251, 1, 0, 0, 0, 154, 1253, 1, 0, 0, 0, 156, 1255, 1, 0, 0, 0, 158, 1259, 1, 0, 0, 0, 160, 1264, 1, 0, 0, 0, 162, 1266, 1, 0, 0, 0, 164, 1270, 1, 0, 0, 0, 166, 1276, 1, 0, 0, 0, 168, 1278, 1, 0, 0, 0, 170, 1292, 1, 0, 0, 0, 172, 1294, 1, 0, 0, 0, 174, 1308, 1, 0, 0, 0, 176, 178, 3, 2, 1, 0, 177, 176, 1, 0, 0, 0, 178, 181, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 182, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 182, 183, 5, 0, 0, 1, 183, 1, 1, 0, 0, 0, 184, 187, 3, 6, 3, 0, 185, 187, 3, 10, 5, 0, 186, 184, 1, 0, 0, 0, 186, 185, 1, 0, 0, 0, 187, 3, 1, 0, 0, 0, 188, 189, 3, 116, 58, 0, 189, 5, 1, 0, 0, 0, 190, 191, 5, 52, 0, 0, 191, 195, 3, 160, 80, 0, 192, 193, 5, 115, 0, 0, 193, 194, 5, 122, 0, 0, 194, 196, 3, 4, 2, 0, 195, 192, 1, 0, 0, 0, 195, 196, 1, 0, 0, 0, 196, 7, 1, 0, 0, 0, 197, 202, 3, 160, 80, 0, 198, 199, 5, 116, 0, 0, 199, 201, 3, 160, 80, 0, 200, 198, 1, 0, 0, 0, 201, 204, 1, 0, 0, 0, 202, 200, 1, 0, 0, 0, 202, 203, 1, 0, 
0, 0, 203, 206, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 207, 5, 116, 0, 0, 206, 205, 1, 0, 0, 0, 206, 207, 1, 0, 0, 0, 207, 9, 1, 0, 0, 0, 208, 221, 3, 12, 6, 0, 209, 221, 3, 14, 7, 0, 210, 221, 3, 18, 9, 0, 211, 221, 3, 20, 10, 0, 212, 221, 3, 22, 11, 0, 213, 221, 3, 26, 13, 0, 214, 221, 3, 24, 12, 0, 215, 221, 3, 28, 14, 0, 216, 221, 3, 30, 15, 0, 217, 221, 3, 36, 18, 0, 218, 221, 3, 32, 16, 0, 219, 221, 3, 34, 17, 0, 220, 208, 1, 0, 0, 0, 220, 209, 1, 0, 0, 0, 220, 210, 1, 0, 0, 0, 220, 211, 1, 0, 0, 0, 220, 212, 1, 0, 0, 0, 220, 213, 1, 0, 0, 0, 220, 214, 1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 220, 216, 1, 0, 0, 0, 220, 217, 1, 0, 0, 0, 220, 218, 1, 0, 0, 0, 220, 219, 1, 0, 0, 0, 221, 11, 1, 0, 0, 0, 222, 224, 5, 72, 0, 0, 223, 225, 3, 4, 2, 0, 224, 223, 1, 0, 0, 0, 224, 225, 1, 0, 0, 0, 225, 227, 1, 0, 0, 0, 226, 228, 5, 150, 0, 0, 227, 226, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 13, 1, 0, 0, 0, 229, 231, 5, 84, 0, 0, 230, 232, 3, 4, 2, 0, 231, 230, 1, 0, 0, 0, 231, 232, 1, 0, 0, 0, 232, 234, 1, 0, 0, 0, 233, 235, 5, 150, 0, 0, 234, 233, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 15, 1, 0, 0, 0, 236, 245, 5, 14, 0, 0, 237, 238, 5, 130, 0, 0, 238, 241, 3, 160, 80, 0, 239, 240, 5, 115, 0, 0, 240, 242, 3, 160, 80, 0, 241, 239, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 5, 149, 0, 0, 244, 246, 1, 0, 0, 0, 245, 237, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 3, 36, 18, 0, 248, 17, 1, 0, 0, 0, 249, 250, 5, 93, 0, 0, 250, 254, 3, 36, 18, 0, 251, 253, 3, 16, 8, 0, 252, 251, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 259, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 258, 5, 29, 0, 0, 258, 260, 3, 36, 18, 0, 259, 257, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260, 19, 1, 0, 0, 0, 261, 262, 5, 40, 0, 0, 262, 263, 5, 130, 0, 0, 263, 264, 3, 4, 2, 0, 264, 265, 5, 149, 0, 0, 265, 268, 3, 10, 5, 0, 266, 267, 5, 25, 0, 0, 267, 269, 3, 10, 5, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 21, 1, 0, 0, 0, 270, 271, 5, 100, 0, 0, 271, 272, 5, 130, 0, 0, 272, 273, 3, 4, 2, 0, 273, 274, 5, 149, 0, 0, 274, 276, 3, 10, 5, 0, 275, 277, 5, 150, 0, 0, 276, 275, 1, 0, 0, 0, 276, 277, 1, 0, 0, 0, 277, 23, 1, 0, 0, 0, 278, 279, 5, 33, 0, 0, 279, 283, 5, 130, 0, 0, 280, 284, 3, 6, 3, 0, 281, 284, 3, 30, 15, 0, 282, 284, 3, 4, 2, 0, 283, 280, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 283, 282, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 287, 5, 150, 0, 0, 286, 288, 3, 4, 2, 0, 287, 286, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 293, 5, 150, 0, 0, 290, 294, 3, 6, 3, 0, 291, 294, 3, 30, 15, 0, 292, 294, 3, 4, 2, 0, 293, 290, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 292, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 5, 149, 0, 0, 296, 298, 3, 10, 5, 0, 297, 299, 5, 150, 0, 0, 298, 297, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 25, 1, 0, 0, 0, 300, 301, 5, 33, 0, 0, 301, 302, 5, 130, 0, 0, 302, 303, 5, 52, 0, 0, 303, 306, 3, 160, 80, 0, 304, 305, 5, 116, 0, 0, 305, 307, 3, 160, 80, 0, 306, 304, 1, 0, 0, 0, 306, 307, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 309, 5, 42, 0, 0, 309, 310, 3, 4, 2, 0, 310, 311, 5, 149, 0, 0, 311, 313, 3, 10, 5, 0, 312, 314, 5, 150, 0, 0, 313, 312, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314, 27, 1, 0, 0, 0, 315, 316, 5, 31, 0, 0, 316, 317, 3, 160, 80, 0, 317, 319, 5, 130, 0, 0, 318, 320, 3, 8, 4, 0, 319, 318, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 322, 5, 149, 0, 0, 322, 323, 3, 36, 18, 0, 323, 29, 1, 0, 0, 0, 324, 325, 3, 4, 2, 0, 325, 326, 5, 115, 0, 0, 326, 327, 5, 
122, 0, 0, 327, 328, 3, 4, 2, 0, 328, 31, 1, 0, 0, 0, 329, 331, 3, 4, 2, 0, 330, 332, 5, 150, 0, 0, 331, 330, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 33, 1, 0, 0, 0, 333, 334, 5, 150, 0, 0, 334, 35, 1, 0, 0, 0, 335, 339, 5, 128, 0, 0, 336, 338, 3, 2, 1, 0, 337, 336, 1, 0, 0, 0, 338, 341, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 339, 340, 1, 0, 0, 0, 340, 342, 1, 0, 0, 0, 341, 339, 1, 0, 0, 0, 342, 343, 5, 147, 0, 0, 343, 37, 1, 0, 0, 0, 344, 345, 3, 4, 2, 0, 345, 346, 5, 115, 0, 0, 346, 347, 3, 4, 2, 0, 347, 39, 1, 0, 0, 0, 348, 353, 3, 38, 19, 0, 349, 350, 5, 116, 0, 0, 350, 352, 3, 38, 19, 0, 351, 349, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 356, 358, 5, 116, 0, 0, 357, 356, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 41, 1, 0, 0, 0, 359, 363, 3, 44, 22, 0, 360, 363, 3, 48, 24, 0, 361, 363, 3, 124, 62, 0, 362, 359, 1, 0, 0, 0, 362, 360, 1, 0, 0, 0, 362, 361, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 365, 5, 0, 0, 1, 365, 43, 1, 0, 0, 0, 366, 372, 3, 46, 23, 0, 367, 368, 5, 95, 0, 0, 368, 369, 5, 1, 0, 0, 369, 371, 3, 46, 23, 0, 370, 367, 1, 0, 0, 0, 371, 374, 1, 0, 0, 0, 372, 370, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 45, 1, 0, 0, 0, 374, 372, 1, 0, 0, 0, 375, 382, 3, 48, 24, 0, 376, 377, 5, 130, 0, 0, 377, 378, 3, 44, 22, 0, 378, 379, 5, 149, 0, 0, 379, 382, 1, 0, 0, 0, 380, 382, 3, 164, 82, 0, 381, 375, 1, 0, 0, 0, 381, 376, 1, 0, 0, 0, 381, 380, 1, 0, 0, 0, 382, 47, 1, 0, 0, 0, 383, 385, 3, 50, 25, 0, 384, 383, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 388, 5, 79, 0, 0, 387, 389, 5, 24, 0, 0, 388, 387, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 391, 1, 0, 0, 0, 390, 392, 3, 52, 26, 0, 391, 390, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 395, 3, 114, 57, 0, 394, 396, 3, 54, 27, 0, 395, 394, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 398, 1, 0, 0, 0, 397, 399, 3, 56, 28, 0, 398, 397, 1, 0, 0, 0, 398, 399, 1, 0, 0, 0, 399, 401, 1, 0, 0, 0, 400, 402, 3, 60, 30, 0, 401, 400, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 404, 1, 0, 0, 0, 403, 405, 3, 62, 31, 0, 404, 403, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 407, 1, 0, 0, 0, 406, 408, 3, 64, 32, 0, 407, 406, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 410, 5, 102, 0, 0, 410, 412, 7, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 415, 1, 0, 0, 0, 413, 414, 5, 102, 0, 0, 414, 416, 5, 89, 0, 0, 415, 413, 1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 418, 1, 0, 0, 0, 417, 419, 3, 66, 33, 0, 418, 417, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 421, 1, 0, 0, 0, 420, 422, 3, 58, 29, 0, 421, 420, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 424, 1, 0, 0, 0, 423, 425, 3, 68, 34, 0, 424, 423, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 428, 1, 0, 0, 0, 426, 429, 3, 72, 36, 0, 427, 429, 3, 74, 37, 0, 428, 426, 1, 0, 0, 0, 428, 427, 1, 0, 0, 0, 428, 429, 1, 0, 0, 0, 429, 431, 1, 0, 0, 0, 430, 432, 3, 76, 38, 0, 431, 430, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 49, 1, 0, 0, 0, 433, 434, 5, 102, 0, 0, 434, 435, 3, 128, 64, 0, 435, 51, 1, 0, 0, 0, 436, 437, 5, 88, 0, 0, 437, 440, 5, 108, 0, 0, 438, 439, 5, 102, 0, 0, 439, 441, 5, 85, 0, 0, 440, 438, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 53, 1, 0, 0, 0, 442, 443, 5, 34, 0, 0, 443, 444, 3, 78, 39, 0, 444, 55, 1, 0, 0, 0, 445, 447, 7, 1, 0, 0, 446, 445, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 5, 5, 0, 0, 449, 450, 5, 47, 0, 0, 450, 451, 3, 114, 57, 0, 451, 57, 1, 0, 0, 0, 452, 453, 5, 101, 0, 0, 453, 454, 3, 160, 80, 0, 454, 455, 5, 6, 0, 0, 455, 456, 5, 130, 0, 0, 456, 457, 3, 98, 49, 0, 
457, 467, 5, 149, 0, 0, 458, 459, 5, 116, 0, 0, 459, 460, 3, 160, 80, 0, 460, 461, 5, 6, 0, 0, 461, 462, 5, 130, 0, 0, 462, 463, 3, 98, 49, 0, 463, 464, 5, 149, 0, 0, 464, 466, 1, 0, 0, 0, 465, 458, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 467, 468, 1, 0, 0, 0, 468, 59, 1, 0, 0, 0, 469, 467, 1, 0, 0, 0, 470, 471, 5, 69, 0, 0, 471, 472, 3, 116, 58, 0, 472, 61, 1, 0, 0, 0, 473, 474, 5, 99, 0, 0, 474, 475, 3, 116, 58, 0, 475, 63, 1, 0, 0, 0, 476, 477, 5, 36, 0, 0, 477, 484, 5, 11, 0, 0, 478, 479, 7, 0, 0, 0, 479, 480, 5, 130, 0, 0, 480, 481, 3, 114, 57, 0, 481, 482, 5, 149, 0, 0, 482, 485, 1, 0, 0, 0, 483, 485, 3, 114, 57, 0, 484, 478, 1, 0, 0, 0, 484, 483, 1, 0, 0, 0, 485, 65, 1, 0, 0, 0, 486, 487, 5, 37, 0, 0, 487, 488, 3, 116, 58, 0, 488, 67, 1, 0, 0, 0, 489, 490, 5, 64, 0, 0, 490, 491, 5, 11, 0, 0, 491, 492, 3, 88, 44, 0, 492, 69, 1, 0, 0, 0, 493, 494, 5, 64, 0, 0, 494, 495, 5, 11, 0, 0, 495, 496, 3, 114, 57, 0, 496, 71, 1, 0, 0, 0, 497, 498, 5, 54, 0, 0, 498, 501, 3, 116, 58, 0, 499, 500, 5, 116, 0, 0, 500, 502, 3, 116, 58, 0, 501, 499, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 507, 1, 0, 0, 0, 503, 504, 5, 102, 0, 0, 504, 508, 5, 85, 0, 0, 505, 506, 5, 11, 0, 0, 506, 508, 3, 114, 57, 0, 507, 503, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 527, 1, 0, 0, 0, 509, 510, 5, 54, 0, 0, 510, 513, 3, 116, 58, 0, 511, 512, 5, 102, 0, 0, 512, 514, 5, 85, 0, 0, 513, 511, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 5, 61, 0, 0, 516, 517, 3, 116, 58, 0, 517, 527, 1, 0, 0, 0, 518, 519, 5, 54, 0, 0, 519, 520, 3, 116, 58, 0, 520, 521, 5, 61, 0, 0, 521, 524, 3, 116, 58, 0, 522, 523, 5, 11, 0, 0, 523, 525, 3, 114, 57, 0, 524, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 527, 1, 0, 0, 0, 526, 497, 1, 0, 0, 0, 526, 509, 1, 0, 0, 0, 526, 518, 1, 0, 0, 0, 527, 73, 1, 0, 0, 0, 528, 529, 5, 61, 0, 0, 529, 530, 3, 116, 58, 0, 530, 75, 1, 0, 0, 0, 531, 532, 5, 81, 0, 0, 532, 533, 3, 94, 47, 0, 533, 77, 1, 0, 0, 0, 534, 535, 6, 39, -1, 0, 535, 537, 3, 136, 68, 0, 536, 538, 5, 28, 0, 0, 537, 536, 1, 0, 0, 0, 537, 538, 1, 0, 0, 0, 538, 540, 1, 0, 0, 0, 539, 541, 3, 86, 43, 0, 540, 539, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 547, 1, 0, 0, 0, 542, 543, 5, 130, 0, 0, 543, 544, 3, 78, 39, 0, 544, 545, 5, 149, 0, 0, 545, 547, 1, 0, 0, 0, 546, 534, 1, 0, 0, 0, 546, 542, 1, 0, 0, 0, 547, 562, 1, 0, 0, 0, 548, 549, 10, 3, 0, 0, 549, 550, 3, 82, 41, 0, 550, 551, 3, 78, 39, 4, 551, 561, 1, 0, 0, 0, 552, 554, 10, 4, 0, 0, 553, 555, 3, 80, 40, 0, 554, 553, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 5, 47, 0, 0, 557, 558, 3, 78, 39, 0, 558, 559, 3, 84, 42, 0, 559, 561, 1, 0, 0, 0, 560, 548, 1, 0, 0, 0, 560, 552, 1, 0, 0, 0, 561, 564, 1, 0, 0, 0, 562, 560, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 79, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 565, 567, 7, 2, 0, 0, 566, 565, 1, 0, 0, 0, 566, 567, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 575, 5, 44, 0, 0, 569, 571, 5, 44, 0, 0, 570, 572, 7, 2, 0, 0, 571, 570, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 575, 1, 0, 0, 0, 573, 575, 7, 2, 0, 0, 574, 566, 1, 0, 0, 0, 574, 569, 1, 0, 0, 0, 574, 573, 1, 0, 0, 0, 575, 609, 1, 0, 0, 0, 576, 578, 7, 3, 0, 0, 577, 576, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 581, 7, 4, 0, 0, 580, 582, 5, 65, 0, 0, 581, 580, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 591, 1, 0, 0, 0, 583, 585, 7, 4, 0, 0, 584, 586, 5, 65, 0, 0, 585, 584, 1, 0, 0, 0, 585, 586, 1, 0, 0, 0, 586, 588, 1, 0, 0, 0, 587, 589, 7, 3, 0, 0, 588, 587, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 591, 1, 0, 0, 0, 590, 577, 
1, 0, 0, 0, 590, 583, 1, 0, 0, 0, 591, 609, 1, 0, 0, 0, 592, 594, 7, 5, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 597, 5, 35, 0, 0, 596, 598, 5, 65, 0, 0, 597, 596, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 607, 1, 0, 0, 0, 599, 601, 5, 35, 0, 0, 600, 602, 5, 65, 0, 0, 601, 600, 1, 0, 0, 0, 601, 602, 1, 0, 0, 0, 602, 604, 1, 0, 0, 0, 603, 605, 7, 5, 0, 0, 604, 603, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 593, 1, 0, 0, 0, 606, 599, 1, 0, 0, 0, 607, 609, 1, 0, 0, 0, 608, 574, 1, 0, 0, 0, 608, 590, 1, 0, 0, 0, 608, 606, 1, 0, 0, 0, 609, 81, 1, 0, 0, 0, 610, 611, 5, 17, 0, 0, 611, 614, 5, 47, 0, 0, 612, 614, 5, 116, 0, 0, 613, 610, 1, 0, 0, 0, 613, 612, 1, 0, 0, 0, 614, 83, 1, 0, 0, 0, 615, 616, 5, 62, 0, 0, 616, 625, 3, 114, 57, 0, 617, 618, 5, 96, 0, 0, 618, 619, 5, 130, 0, 0, 619, 620, 3, 114, 57, 0, 620, 621, 5, 149, 0, 0, 621, 625, 1, 0, 0, 0, 622, 623, 5, 96, 0, 0, 623, 625, 3, 114, 57, 0, 624, 615, 1, 0, 0, 0, 624, 617, 1, 0, 0, 0, 624, 622, 1, 0, 0, 0, 625, 85, 1, 0, 0, 0, 626, 627, 5, 77, 0, 0, 627, 630, 3, 92, 46, 0, 628, 629, 5, 61, 0, 0, 629, 631, 3, 92, 46, 0, 630, 628, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 87, 1, 0, 0, 0, 632, 637, 3, 90, 45, 0, 633, 634, 5, 116, 0, 0, 634, 636, 3, 90, 45, 0, 635, 633, 1, 0, 0, 0, 636, 639, 1, 0, 0, 0, 637, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 89, 1, 0, 0, 0, 639, 637, 1, 0, 0, 0, 640, 642, 3, 116, 58, 0, 641, 643, 7, 6, 0, 0, 642, 641, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 646, 1, 0, 0, 0, 644, 645, 5, 60, 0, 0, 645, 647, 7, 7, 0, 0, 646, 644, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 650, 1, 0, 0, 0, 648, 649, 5, 16, 0, 0, 649, 651, 5, 110, 0, 0, 650, 648, 1, 0, 0, 0, 650, 651, 1, 0, 0, 0, 651, 91, 1, 0, 0, 0, 652, 659, 3, 164, 82, 0, 653, 656, 3, 148, 74, 0, 654, 655, 5, 151, 0, 0, 655, 657, 3, 148, 74, 0, 656, 654, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 659, 1, 0, 0, 0, 658, 652, 1, 0, 0, 0, 658, 653, 1, 0, 0, 0, 659, 93, 1, 0, 0, 0, 660, 665, 3, 96, 48, 0, 661, 662, 5, 116, 0, 0, 662, 664, 3, 96, 48, 0, 663, 661, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 95, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 669, 3, 160, 80, 0, 669, 670, 5, 122, 0, 0, 670, 671, 3, 150, 75, 0, 671, 97, 1, 0, 0, 0, 672, 674, 3, 100, 50, 0, 673, 672, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 676, 1, 0, 0, 0, 675, 677, 3, 102, 51, 0, 676, 675, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 679, 1, 0, 0, 0, 678, 680, 3, 104, 52, 0, 679, 678, 1, 0, 0, 0, 679, 680, 1, 0, 0, 0, 680, 99, 1, 0, 0, 0, 681, 682, 5, 67, 0, 0, 682, 683, 5, 11, 0, 0, 683, 684, 3, 114, 57, 0, 684, 101, 1, 0, 0, 0, 685, 686, 5, 64, 0, 0, 686, 687, 5, 11, 0, 0, 687, 688, 3, 88, 44, 0, 688, 103, 1, 0, 0, 0, 689, 690, 7, 8, 0, 0, 690, 691, 3, 106, 53, 0, 691, 105, 1, 0, 0, 0, 692, 699, 3, 108, 54, 0, 693, 694, 5, 9, 0, 0, 694, 695, 3, 108, 54, 0, 695, 696, 5, 2, 0, 0, 696, 697, 3, 108, 54, 0, 697, 699, 1, 0, 0, 0, 698, 692, 1, 0, 0, 0, 698, 693, 1, 0, 0, 0, 699, 107, 1, 0, 0, 0, 700, 701, 5, 19, 0, 0, 701, 713, 5, 75, 0, 0, 702, 703, 5, 94, 0, 0, 703, 713, 5, 68, 0, 0, 704, 705, 5, 94, 0, 0, 705, 713, 5, 32, 0, 0, 706, 707, 3, 148, 74, 0, 707, 708, 5, 68, 0, 0, 708, 713, 1, 0, 0, 0, 709, 710, 3, 148, 74, 0, 710, 711, 5, 32, 0, 0, 711, 713, 1, 0, 0, 0, 712, 700, 1, 0, 0, 0, 712, 702, 1, 0, 0, 0, 712, 704, 1, 0, 0, 0, 712, 706, 1, 0, 0, 0, 712, 709, 1, 0, 0, 0, 713, 109, 1, 0, 0, 0, 714, 715, 3, 116, 58, 0, 715, 716, 5, 0, 0, 1, 716, 111, 1, 0, 0, 0, 717, 774, 3, 160, 80, 0, 718, 719, 3, 160, 80, 0, 719, 720, 5, 130, 0, 
0, 720, 721, 3, 160, 80, 0, 721, 728, 3, 112, 56, 0, 722, 723, 5, 116, 0, 0, 723, 724, 3, 160, 80, 0, 724, 725, 3, 112, 56, 0, 725, 727, 1, 0, 0, 0, 726, 722, 1, 0, 0, 0, 727, 730, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 732, 1, 0, 0, 0, 730, 728, 1, 0, 0, 0, 731, 733, 5, 116, 0, 0, 732, 731, 1, 0, 0, 0, 732, 733, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 735, 5, 149, 0, 0, 735, 774, 1, 0, 0, 0, 736, 737, 3, 160, 80, 0, 737, 738, 5, 130, 0, 0, 738, 743, 3, 162, 81, 0, 739, 740, 5, 116, 0, 0, 740, 742, 3, 162, 81, 0, 741, 739, 1, 0, 0, 0, 742, 745, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 743, 744, 1, 0, 0, 0, 744, 747, 1, 0, 0, 0, 745, 743, 1, 0, 0, 0, 746, 748, 5, 116, 0, 0, 747, 746, 1, 0, 0, 0, 747, 748, 1, 0, 0, 0, 748, 749, 1, 0, 0, 0, 749, 750, 5, 149, 0, 0, 750, 774, 1, 0, 0, 0, 751, 752, 3, 160, 80, 0, 752, 753, 5, 130, 0, 0, 753, 758, 3, 112, 56, 0, 754, 755, 5, 116, 0, 0, 755, 757, 3, 112, 56, 0, 756, 754, 1, 0, 0, 0, 757, 760, 1, 0, 0, 0, 758, 756, 1, 0, 0, 0, 758, 759, 1, 0, 0, 0, 759, 762, 1, 0, 0, 0, 760, 758, 1, 0, 0, 0, 761, 763, 5, 116, 0, 0, 762, 761, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 765, 5, 149, 0, 0, 765, 774, 1, 0, 0, 0, 766, 767, 3, 160, 80, 0, 767, 769, 5, 130, 0, 0, 768, 770, 3, 114, 57, 0, 769, 768, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 772, 5, 149, 0, 0, 772, 774, 1, 0, 0, 0, 773, 717, 1, 0, 0, 0, 773, 718, 1, 0, 0, 0, 773, 736, 1, 0, 0, 0, 773, 751, 1, 0, 0, 0, 773, 766, 1, 0, 0, 0, 774, 113, 1, 0, 0, 0, 775, 780, 3, 116, 58, 0, 776, 777, 5, 116, 0, 0, 777, 779, 3, 116, 58, 0, 778, 776, 1, 0, 0, 0, 779, 782, 1, 0, 0, 0, 780, 778, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 784, 1, 0, 0, 0, 782, 780, 1, 0, 0, 0, 783, 785, 5, 116, 0, 0, 784, 783, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 115, 1, 0, 0, 0, 786, 787, 6, 58, -1, 0, 787, 789, 5, 12, 0, 0, 788, 790, 3, 116, 58, 0, 789, 788, 1, 0, 0, 0, 789, 790, 1, 0, 0, 0, 790, 796, 1, 0, 0, 0, 791, 792, 5, 98, 0, 0, 792, 793, 3, 116, 58, 0, 793, 794, 5, 83, 0, 0, 794, 795, 3, 116, 58, 0, 795, 797, 1, 0, 0, 0, 796, 791, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 802, 1, 0, 0, 0, 800, 801, 5, 25, 0, 0, 801, 803, 3, 116, 58, 0, 802, 800, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 805, 5, 26, 0, 0, 805, 936, 1, 0, 0, 0, 806, 807, 5, 13, 0, 0, 807, 808, 5, 130, 0, 0, 808, 809, 3, 116, 58, 0, 809, 810, 5, 6, 0, 0, 810, 811, 3, 112, 56, 0, 811, 812, 5, 149, 0, 0, 812, 936, 1, 0, 0, 0, 813, 814, 5, 20, 0, 0, 814, 936, 5, 110, 0, 0, 815, 816, 5, 45, 0, 0, 816, 817, 3, 116, 58, 0, 817, 818, 3, 152, 76, 0, 818, 936, 1, 0, 0, 0, 819, 820, 5, 82, 0, 0, 820, 821, 5, 130, 0, 0, 821, 822, 3, 116, 58, 0, 822, 823, 5, 34, 0, 0, 823, 826, 3, 116, 58, 0, 824, 825, 5, 33, 0, 0, 825, 827, 3, 116, 58, 0, 826, 824, 1, 0, 0, 0, 826, 827, 1, 0, 0, 0, 827, 828, 1, 0, 0, 0, 828, 829, 5, 149, 0, 0, 829, 936, 1, 0, 0, 0, 830, 831, 5, 86, 0, 0, 831, 936, 5, 110, 0, 0, 832, 833, 5, 91, 0, 0, 833, 834, 5, 130, 0, 0, 834, 835, 7, 9, 0, 0, 835, 836, 3, 166, 83, 0, 836, 837, 5, 34, 0, 0, 837, 838, 3, 116, 58, 0, 838, 839, 5, 149, 0, 0, 839, 936, 1, 0, 0, 0, 840, 841, 3, 160, 80, 0, 841, 843, 5, 130, 0, 0, 842, 844, 3, 114, 57, 0, 843, 842, 1, 0, 0, 0, 843, 844, 1, 0, 0, 0, 844, 845, 1, 0, 0, 0, 845, 846, 5, 149, 0, 0, 846, 855, 1, 0, 0, 0, 847, 849, 5, 130, 0, 0, 848, 850, 5, 24, 0, 0, 849, 848, 1, 0, 0, 0, 849, 850, 1, 0, 0, 0, 850, 852, 1, 0, 0, 0, 851, 853, 3, 118, 59, 0, 852, 851, 1, 0, 0, 0, 852, 853, 1, 0, 0, 0, 853, 854, 1, 0, 0, 0, 854, 
856, 5, 149, 0, 0, 855, 847, 1, 0, 0, 0, 855, 856, 1, 0, 0, 0, 856, 857, 1, 0, 0, 0, 857, 858, 5, 66, 0, 0, 858, 859, 5, 130, 0, 0, 859, 860, 3, 98, 49, 0, 860, 861, 5, 149, 0, 0, 861, 936, 1, 0, 0, 0, 862, 863, 3, 160, 80, 0, 863, 865, 5, 130, 0, 0, 864, 866, 3, 114, 57, 0, 865, 864, 1, 0, 0, 0, 865, 866, 1, 0, 0, 0, 866, 867, 1, 0, 0, 0, 867, 868, 5, 149, 0, 0, 868, 877, 1, 0, 0, 0, 869, 871, 5, 130, 0, 0, 870, 872, 5, 24, 0, 0, 871, 870, 1, 0, 0, 0, 871, 872, 1, 0, 0, 0, 872, 874, 1, 0, 0, 0, 873, 875, 3, 118, 59, 0, 874, 873, 1, 0, 0, 0, 874, 875, 1, 0, 0, 0, 875, 876, 1, 0, 0, 0, 876, 878, 5, 149, 0, 0, 877, 869, 1, 0, 0, 0, 877, 878, 1, 0, 0, 0, 878, 879, 1, 0, 0, 0, 879, 880, 5, 66, 0, 0, 880, 881, 3, 160, 80, 0, 881, 936, 1, 0, 0, 0, 882, 888, 3, 160, 80, 0, 883, 885, 5, 130, 0, 0, 884, 886, 3, 114, 57, 0, 885, 884, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 887, 1, 0, 0, 0, 887, 889, 5, 149, 0, 0, 888, 883, 1, 0, 0, 0, 888, 889, 1, 0, 0, 0, 889, 890, 1, 0, 0, 0, 890, 892, 5, 130, 0, 0, 891, 893, 5, 24, 0, 0, 892, 891, 1, 0, 0, 0, 892, 893, 1, 0, 0, 0, 893, 895, 1, 0, 0, 0, 894, 896, 3, 118, 59, 0, 895, 894, 1, 0, 0, 0, 895, 896, 1, 0, 0, 0, 896, 897, 1, 0, 0, 0, 897, 898, 5, 149, 0, 0, 898, 936, 1, 0, 0, 0, 899, 936, 3, 124, 62, 0, 900, 936, 3, 168, 84, 0, 901, 936, 3, 150, 75, 0, 902, 903, 5, 118, 0, 0, 903, 936, 3, 116, 58, 19, 904, 905, 5, 58, 0, 0, 905, 936, 3, 116, 58, 13, 906, 907, 3, 140, 70, 0, 907, 908, 5, 120, 0, 0, 908, 910, 1, 0, 0, 0, 909, 906, 1, 0, 0, 0, 909, 910, 1, 0, 0, 0, 910, 911, 1, 0, 0, 0, 911, 936, 5, 112, 0, 0, 912, 913, 5, 130, 0, 0, 913, 914, 3, 44, 22, 0, 914, 915, 5, 149, 0, 0, 915, 936, 1, 0, 0, 0, 916, 917, 5, 130, 0, 0, 917, 918, 3, 116, 58, 0, 918, 919, 5, 149, 0, 0, 919, 936, 1, 0, 0, 0, 920, 921, 5, 130, 0, 0, 921, 922, 3, 114, 57, 0, 922, 923, 5, 149, 0, 0, 923, 936, 1, 0, 0, 0, 924, 926, 5, 129, 0, 0, 925, 927, 3, 114, 57, 0, 926, 925, 1, 0, 0, 0, 926, 927, 1, 0, 0, 0, 927, 928, 1, 0, 0, 0, 928, 936, 5, 148, 0, 0, 929, 931, 5, 128, 0, 0, 930, 932, 3, 40, 20, 0, 931, 930, 1, 0, 0, 0, 931, 932, 1, 0, 0, 0, 932, 933, 1, 0, 0, 0, 933, 936, 5, 147, 0, 0, 934, 936, 3, 132, 66, 0, 935, 786, 1, 0, 0, 0, 935, 806, 1, 0, 0, 0, 935, 813, 1, 0, 0, 0, 935, 815, 1, 0, 0, 0, 935, 819, 1, 0, 0, 0, 935, 830, 1, 0, 0, 0, 935, 832, 1, 0, 0, 0, 935, 840, 1, 0, 0, 0, 935, 862, 1, 0, 0, 0, 935, 882, 1, 0, 0, 0, 935, 899, 1, 0, 0, 0, 935, 900, 1, 0, 0, 0, 935, 901, 1, 0, 0, 0, 935, 902, 1, 0, 0, 0, 935, 904, 1, 0, 0, 0, 935, 909, 1, 0, 0, 0, 935, 912, 1, 0, 0, 0, 935, 916, 1, 0, 0, 0, 935, 920, 1, 0, 0, 0, 935, 924, 1, 0, 0, 0, 935, 929, 1, 0, 0, 0, 935, 934, 1, 0, 0, 0, 936, 1041, 1, 0, 0, 0, 937, 941, 10, 18, 0, 0, 938, 942, 5, 112, 0, 0, 939, 942, 5, 151, 0, 0, 940, 942, 5, 138, 0, 0, 941, 938, 1, 0, 0, 0, 941, 939, 1, 0, 0, 0, 941, 940, 1, 0, 0, 0, 942, 943, 1, 0, 0, 0, 943, 1040, 3, 116, 58, 19, 944, 948, 10, 17, 0, 0, 945, 949, 5, 139, 0, 0, 946, 949, 5, 118, 0, 0, 947, 949, 5, 117, 0, 0, 948, 945, 1, 0, 0, 0, 948, 946, 1, 0, 0, 0, 948, 947, 1, 0, 0, 0, 949, 950, 1, 0, 0, 0, 950, 1040, 3, 116, 58, 18, 951, 976, 10, 16, 0, 0, 952, 977, 5, 121, 0, 0, 953, 977, 5, 122, 0, 0, 954, 977, 5, 133, 0, 0, 955, 977, 5, 131, 0, 0, 956, 977, 5, 132, 0, 0, 957, 977, 5, 123, 0, 0, 958, 977, 5, 124, 0, 0, 959, 961, 5, 58, 0, 0, 960, 959, 1, 0, 0, 0, 960, 961, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 964, 5, 42, 0, 0, 963, 965, 5, 15, 0, 0, 964, 963, 1, 0, 0, 0, 964, 965, 1, 0, 0, 0, 965, 977, 1, 0, 0, 0, 966, 968, 5, 58, 0, 0, 967, 966, 1, 0, 0, 0, 967, 968, 1, 0, 0, 0, 968, 969, 1, 
0, 0, 0, 969, 977, 7, 10, 0, 0, 970, 977, 5, 145, 0, 0, 971, 977, 5, 146, 0, 0, 972, 977, 5, 135, 0, 0, 973, 977, 5, 126, 0, 0, 974, 977, 5, 127, 0, 0, 975, 977, 5, 134, 0, 0, 976, 952, 1, 0, 0, 0, 976, 953, 1, 0, 0, 0, 976, 954, 1, 0, 0, 0, 976, 955, 1, 0, 0, 0, 976, 956, 1, 0, 0, 0, 976, 957, 1, 0, 0, 0, 976, 958, 1, 0, 0, 0, 976, 960, 1, 0, 0, 0, 976, 967, 1, 0, 0, 0, 976, 970, 1, 0, 0, 0, 976, 971, 1, 0, 0, 0, 976, 972, 1, 0, 0, 0, 976, 973, 1, 0, 0, 0, 976, 974, 1, 0, 0, 0, 976, 975, 1, 0, 0, 0, 977, 978, 1, 0, 0, 0, 978, 1040, 3, 116, 58, 17, 979, 980, 10, 14, 0, 0, 980, 981, 5, 137, 0, 0, 981, 1040, 3, 116, 58, 15, 982, 983, 10, 12, 0, 0, 983, 984, 5, 2, 0, 0, 984, 1040, 3, 116, 58, 13, 985, 986, 10, 11, 0, 0, 986, 987, 5, 63, 0, 0, 987, 1040, 3, 116, 58, 12, 988, 990, 10, 10, 0, 0, 989, 991, 5, 58, 0, 0, 990, 989, 1, 0, 0, 0, 990, 991, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, 993, 5, 9, 0, 0, 993, 994, 3, 116, 58, 0, 994, 995, 5, 2, 0, 0, 995, 996, 3, 116, 58, 11, 996, 1040, 1, 0, 0, 0, 997, 998, 10, 9, 0, 0, 998, 999, 5, 140, 0, 0, 999, 1000, 3, 116, 58, 0, 1000, 1001, 5, 115, 0, 0, 1001, 1002, 3, 116, 58, 9, 1002, 1040, 1, 0, 0, 0, 1003, 1004, 10, 25, 0, 0, 1004, 1005, 5, 129, 0, 0, 1005, 1006, 3, 116, 58, 0, 1006, 1007, 5, 148, 0, 0, 1007, 1040, 1, 0, 0, 0, 1008, 1009, 10, 24, 0, 0, 1009, 1010, 5, 120, 0, 0, 1010, 1040, 5, 108, 0, 0, 1011, 1012, 10, 23, 0, 0, 1012, 1013, 5, 120, 0, 0, 1013, 1040, 3, 160, 80, 0, 1014, 1015, 10, 22, 0, 0, 1015, 1016, 5, 136, 0, 0, 1016, 1017, 5, 129, 0, 0, 1017, 1018, 3, 116, 58, 0, 1018, 1019, 5, 148, 0, 0, 1019, 1040, 1, 0, 0, 0, 1020, 1021, 10, 21, 0, 0, 1021, 1022, 5, 136, 0, 0, 1022, 1040, 5, 108, 0, 0, 1023, 1024, 10, 20, 0, 0, 1024, 1025, 5, 136, 0, 0, 1025, 1040, 3, 160, 80, 0, 1026, 1027, 10, 15, 0, 0, 1027, 1029, 5, 46, 0, 0, 1028, 1030, 5, 58, 0, 0, 1029, 1028, 1, 0, 0, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1040, 5, 59, 0, 0, 1032, 1037, 10, 8, 0, 0, 1033, 1034, 5, 6, 0, 0, 1034, 1038, 3, 160, 80, 0, 1035, 1036, 5, 6, 0, 0, 1036, 1038, 5, 110, 0, 0, 1037, 1033, 1, 0, 0, 0, 1037, 1035, 1, 0, 0, 0, 1038, 1040, 1, 0, 0, 0, 1039, 937, 1, 0, 0, 0, 1039, 944, 1, 0, 0, 0, 1039, 951, 1, 0, 0, 0, 1039, 979, 1, 0, 0, 0, 1039, 982, 1, 0, 0, 0, 1039, 985, 1, 0, 0, 0, 1039, 988, 1, 0, 0, 0, 1039, 997, 1, 0, 0, 0, 1039, 1003, 1, 0, 0, 0, 1039, 1008, 1, 0, 0, 0, 1039, 1011, 1, 0, 0, 0, 1039, 1014, 1, 0, 0, 0, 1039, 1020, 1, 0, 0, 0, 1039, 1023, 1, 0, 0, 0, 1039, 1026, 1, 0, 0, 0, 1039, 1032, 1, 0, 0, 0, 1040, 1043, 1, 0, 0, 0, 1041, 1039, 1, 0, 0, 0, 1041, 1042, 1, 0, 0, 0, 1042, 117, 1, 0, 0, 0, 1043, 1041, 1, 0, 0, 0, 1044, 1049, 3, 120, 60, 0, 1045, 1046, 5, 116, 0, 0, 1046, 1048, 3, 120, 60, 0, 1047, 1045, 1, 0, 0, 0, 1048, 1051, 1, 0, 0, 0, 1049, 1047, 1, 0, 0, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1053, 1, 0, 0, 0, 1051, 1049, 1, 0, 0, 0, 1052, 1054, 5, 116, 0, 0, 1053, 1052, 1, 0, 0, 0, 1053, 1054, 1, 0, 0, 0, 1054, 119, 1, 0, 0, 0, 1055, 1058, 3, 122, 61, 0, 1056, 1058, 3, 116, 58, 0, 1057, 1055, 1, 0, 0, 0, 1057, 1056, 1, 0, 0, 0, 1058, 121, 1, 0, 0, 0, 1059, 1060, 5, 130, 0, 0, 1060, 1065, 3, 160, 80, 0, 1061, 1062, 5, 116, 0, 0, 1062, 1064, 3, 160, 80, 0, 1063, 1061, 1, 0, 0, 0, 1064, 1067, 1, 0, 0, 0, 1065, 1063, 1, 0, 0, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1069, 1, 0, 0, 0, 1067, 1065, 1, 0, 0, 0, 1068, 1070, 5, 116, 0, 0, 1069, 1068, 1, 0, 0, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 5, 149, 0, 0, 1072, 1085, 1, 0, 0, 0, 1073, 1078, 3, 160, 80, 0, 1074, 1075, 5, 116, 0, 0, 1075, 1077, 3, 160, 80, 0, 1076, 
1074, 1, 0, 0, 0, 1077, 1080, 1, 0, 0, 0, 1078, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1082, 1, 0, 0, 0, 1080, 1078, 1, 0, 0, 0, 1081, 1083, 5, 116, 0, 0, 1082, 1081, 1, 0, 0, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1085, 1, 0, 0, 0, 1084, 1059, 1, 0, 0, 0, 1084, 1073, 1, 0, 0, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 5, 111, 0, 0, 1087, 1088, 3, 116, 58, 0, 1088, 123, 1, 0, 0, 0, 1089, 1090, 5, 132, 0, 0, 1090, 1094, 3, 160, 80, 0, 1091, 1093, 3, 126, 63, 0, 1092, 1091, 1, 0, 0, 0, 1093, 1096, 1, 0, 0, 0, 1094, 1092, 1, 0, 0, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1097, 1, 0, 0, 0, 1096, 1094, 1, 0, 0, 0, 1097, 1098, 5, 151, 0, 0, 1098, 1099, 5, 124, 0, 0, 1099, 1122, 1, 0, 0, 0, 1100, 1101, 5, 132, 0, 0, 1101, 1105, 3, 160, 80, 0, 1102, 1104, 3, 126, 63, 0, 1103, 1102, 1, 0, 0, 0, 1104, 1107, 1, 0, 0, 0, 1105, 1103, 1, 0, 0, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1108, 1, 0, 0, 0, 1107, 1105, 1, 0, 0, 0, 1108, 1114, 5, 124, 0, 0, 1109, 1115, 3, 124, 62, 0, 1110, 1111, 5, 128, 0, 0, 1111, 1112, 3, 116, 58, 0, 1112, 1113, 5, 147, 0, 0, 1113, 1115, 1, 0, 0, 0, 1114, 1109, 1, 0, 0, 0, 1114, 1110, 1, 0, 0, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 5, 132, 0, 0, 1117, 1118, 5, 151, 0, 0, 1118, 1119, 3, 160, 80, 0, 1119, 1120, 5, 124, 0, 0, 1120, 1122, 1, 0, 0, 0, 1121, 1089, 1, 0, 0, 0, 1121, 1100, 1, 0, 0, 0, 1122, 125, 1, 0, 0, 0, 1123, 1124, 3, 160, 80, 0, 1124, 1125, 5, 122, 0, 0, 1125, 1126, 3, 166, 83, 0, 1126, 1135, 1, 0, 0, 0, 1127, 1128, 3, 160, 80, 0, 1128, 1129, 5, 122, 0, 0, 1129, 1130, 5, 128, 0, 0, 1130, 1131, 3, 116, 58, 0, 1131, 1132, 5, 147, 0, 0, 1132, 1135, 1, 0, 0, 0, 1133, 1135, 3, 160, 80, 0, 1134, 1123, 1, 0, 0, 0, 1134, 1127, 1, 0, 0, 0, 1134, 1133, 1, 0, 0, 0, 1135, 127, 1, 0, 0, 0, 1136, 1141, 3, 130, 65, 0, 1137, 1138, 5, 116, 0, 0, 1138, 1140, 3, 130, 65, 0, 1139, 1137, 1, 0, 0, 0, 1140, 1143, 1, 0, 0, 0, 1141, 1139, 1, 0, 0, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1145, 1, 0, 0, 0, 1143, 1141, 1, 0, 0, 0, 1144, 1146, 5, 116, 0, 0, 1145, 1144, 1, 0, 0, 0, 1145, 1146, 1, 0, 0, 0, 1146, 129, 1, 0, 0, 0, 1147, 1148, 3, 160, 80, 0, 1148, 1149, 5, 6, 0, 0, 1149, 1150, 5, 130, 0, 0, 1150, 1151, 3, 44, 22, 0, 1151, 1152, 5, 149, 0, 0, 1152, 1158, 1, 0, 0, 0, 1153, 1154, 3, 116, 58, 0, 1154, 1155, 5, 6, 0, 0, 1155, 1156, 3, 160, 80, 0, 1156, 1158, 1, 0, 0, 0, 1157, 1147, 1, 0, 0, 0, 1157, 1153, 1, 0, 0, 0, 1158, 131, 1, 0, 0, 0, 1159, 1167, 3, 164, 82, 0, 1160, 1161, 3, 140, 70, 0, 1161, 1162, 5, 120, 0, 0, 1162, 1164, 1, 0, 0, 0, 1163, 1160, 1, 0, 0, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1167, 3, 134, 67, 0, 1166, 1159, 1, 0, 0, 0, 1166, 1163, 1, 0, 0, 0, 1167, 133, 1, 0, 0, 0, 1168, 1173, 3, 160, 80, 0, 1169, 1170, 5, 120, 0, 0, 1170, 1172, 3, 160, 80, 0, 1171, 1169, 1, 0, 0, 0, 1172, 1175, 1, 0, 0, 0, 1173, 1171, 1, 0, 0, 0, 1173, 1174, 1, 0, 0, 0, 1174, 135, 1, 0, 0, 0, 1175, 1173, 1, 0, 0, 0, 1176, 1177, 6, 68, -1, 0, 1177, 1186, 3, 140, 70, 0, 1178, 1186, 3, 138, 69, 0, 1179, 1180, 5, 130, 0, 0, 1180, 1181, 3, 44, 22, 0, 1181, 1182, 5, 149, 0, 0, 1182, 1186, 1, 0, 0, 0, 1183, 1186, 3, 124, 62, 0, 1184, 1186, 3, 164, 82, 0, 1185, 1176, 1, 0, 0, 0, 1185, 1178, 1, 0, 0, 0, 1185, 1179, 1, 0, 0, 0, 1185, 1183, 1, 0, 0, 0, 1185, 1184, 1, 0, 0, 0, 1186, 1195, 1, 0, 0, 0, 1187, 1191, 10, 3, 0, 0, 1188, 1192, 3, 158, 79, 0, 1189, 1190, 5, 6, 0, 0, 1190, 1192, 3, 160, 80, 0, 1191, 1188, 1, 0, 0, 0, 1191, 1189, 1, 0, 0, 0, 1192, 1194, 1, 0, 0, 0, 1193, 1187, 1, 0, 0, 0, 1194, 1197, 1, 0, 0, 0, 1195, 1193, 1, 0, 0, 0, 1195, 1196, 1, 0, 0, 0, 1196, 137, 1, 0, 
0, 0, 1197, 1195, 1, 0, 0, 0, 1198, 1199, 3, 160, 80, 0, 1199, 1201, 5, 130, 0, 0, 1200, 1202, 3, 142, 71, 0, 1201, 1200, 1, 0, 0, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 5, 149, 0, 0, 1204, 139, 1, 0, 0, 0, 1205, 1206, 3, 144, 72, 0, 1206, 1207, 5, 120, 0, 0, 1207, 1209, 1, 0, 0, 0, 1208, 1205, 1, 0, 0, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 3, 160, 80, 0, 1211, 141, 1, 0, 0, 0, 1212, 1217, 3, 116, 58, 0, 1213, 1214, 5, 116, 0, 0, 1214, 1216, 3, 116, 58, 0, 1215, 1213, 1, 0, 0, 0, 1216, 1219, 1, 0, 0, 0, 1217, 1215, 1, 0, 0, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1221, 1, 0, 0, 0, 1219, 1217, 1, 0, 0, 0, 1220, 1222, 5, 116, 0, 0, 1221, 1220, 1, 0, 0, 0, 1221, 1222, 1, 0, 0, 0, 1222, 143, 1, 0, 0, 0, 1223, 1224, 3, 160, 80, 0, 1224, 145, 1, 0, 0, 0, 1225, 1234, 5, 106, 0, 0, 1226, 1227, 5, 120, 0, 0, 1227, 1234, 7, 11, 0, 0, 1228, 1229, 5, 108, 0, 0, 1229, 1231, 5, 120, 0, 0, 1230, 1232, 7, 11, 0, 0, 1231, 1230, 1, 0, 0, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1234, 1, 0, 0, 0, 1233, 1225, 1, 0, 0, 0, 1233, 1226, 1, 0, 0, 0, 1233, 1228, 1, 0, 0, 0, 1234, 147, 1, 0, 0, 0, 1235, 1237, 7, 12, 0, 0, 1236, 1235, 1, 0, 0, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1244, 1, 0, 0, 0, 1238, 1245, 3, 146, 73, 0, 1239, 1245, 5, 107, 0, 0, 1240, 1245, 5, 108, 0, 0, 1241, 1245, 5, 109, 0, 0, 1242, 1245, 5, 43, 0, 0, 1243, 1245, 5, 57, 0, 0, 1244, 1238, 1, 0, 0, 0, 1244, 1239, 1, 0, 0, 0, 1244, 1240, 1, 0, 0, 0, 1244, 1241, 1, 0, 0, 0, 1244, 1242, 1, 0, 0, 0, 1244, 1243, 1, 0, 0, 0, 1245, 149, 1, 0, 0, 0, 1246, 1250, 3, 148, 74, 0, 1247, 1250, 5, 110, 0, 0, 1248, 1250, 5, 59, 0, 0, 1249, 1246, 1, 0, 0, 0, 1249, 1247, 1, 0, 0, 0, 1249, 1248, 1, 0, 0, 0, 1250, 151, 1, 0, 0, 0, 1251, 1252, 7, 13, 0, 0, 1252, 153, 1, 0, 0, 0, 1253, 1254, 7, 14, 0, 0, 1254, 155, 1, 0, 0, 0, 1255, 1256, 7, 15, 0, 0, 1256, 157, 1, 0, 0, 0, 1257, 1260, 5, 105, 0, 0, 1258, 1260, 3, 156, 78, 0, 1259, 1257, 1, 0, 0, 0, 1259, 1258, 1, 0, 0, 0, 1260, 159, 1, 0, 0, 0, 1261, 1265, 5, 105, 0, 0, 1262, 1265, 3, 152, 76, 0, 1263, 1265, 3, 154, 77, 0, 1264, 1261, 1, 0, 0, 0, 1264, 1262, 1, 0, 0, 0, 1264, 1263, 1, 0, 0, 0, 1265, 161, 1, 0, 0, 0, 1266, 1267, 3, 166, 83, 0, 1267, 1268, 5, 122, 0, 0, 1268, 1269, 3, 148, 74, 0, 1269, 163, 1, 0, 0, 0, 1270, 1271, 5, 128, 0, 0, 1271, 1272, 3, 134, 67, 0, 1272, 1273, 5, 147, 0, 0, 1273, 165, 1, 0, 0, 0, 1274, 1277, 5, 110, 0, 0, 1275, 1277, 3, 168, 84, 0, 1276, 1274, 1, 0, 0, 0, 1276, 1275, 1, 0, 0, 0, 1277, 167, 1, 0, 0, 0, 1278, 1282, 5, 142, 0, 0, 1279, 1281, 3, 170, 85, 0, 1280, 1279, 1, 0, 0, 0, 1281, 1284, 1, 0, 0, 0, 1282, 1280, 1, 0, 0, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1285, 1, 0, 0, 0, 1284, 1282, 1, 0, 0, 0, 1285, 1286, 5, 144, 0, 0, 1286, 169, 1, 0, 0, 0, 1287, 1288, 5, 157, 0, 0, 1288, 1289, 3, 116, 58, 0, 1289, 1290, 5, 147, 0, 0, 1290, 1293, 1, 0, 0, 0, 1291, 1293, 5, 156, 0, 0, 1292, 1287, 1, 0, 0, 0, 1292, 1291, 1, 0, 0, 0, 1293, 171, 1, 0, 0, 0, 1294, 1298, 5, 143, 0, 0, 1295, 1297, 3, 174, 87, 0, 1296, 1295, 1, 0, 0, 0, 1297, 1300, 1, 0, 0, 0, 1298, 1296, 1, 0, 0, 0, 1298, 1299, 1, 0, 0, 0, 1299, 1301, 1, 0, 0, 0, 1300, 1298, 1, 0, 0, 0, 1301, 1302, 5, 0, 0, 1, 1302, 173, 1, 0, 0, 0, 1303, 1304, 5, 159, 0, 0, 1304, 1305, 3, 116, 58, 0, 1305, 1306, 5, 147, 0, 0, 1306, 1309, 1, 0, 0, 0, 1307, 1309, 5, 158, 0, 0, 1308, 1303, 1, 0, 0, 0, 1308, 1307, 1, 0, 0, 0, 1309, 175, 1, 0, 0, 0, 168, 179, 186, 195, 202, 206, 220, 224, 227, 231, 234, 241, 245, 254, 259, 268, 276, 283, 287, 293, 298, 306, 313, 319, 331, 339, 353, 357, 362, 372, 381, 384, 388, 391, 395, 
398, 401, 404, 407, 411, 415, 418, 421, 424, 428, 431, 440, 446, 467, 484, 501, 507, 513, 524, 526, 537, 540, 546, 554, 560, 562, 566, 571, 574, 577, 581, 585, 588, 590, 593, 597, 601, 604, 606, 608, 613, 624, 630, 637, 642, 646, 650, 656, 658, 665, 673, 676, 679, 698, 712, 728, 732, 743, 747, 758, 762, 769, 773, 780, 784, 789, 798, 802, 826, 843, 849, 852, 855, 865, 871, 874, 877, 885, 888, 892, 895, 909, 926, 931, 935, 941, 948, 960, 964, 967, 976, 990, 1029, 1037, 1039, 1041, 1049, 1053, 1057, 1065, 1069, 1078, 1082, 1084, 1094, 1105, 1114, 1121, 1134, 1141, 1145, 1157, 1163, 1166, 1173, 1185, 1191, 1195, 1201, 1208, 1217, 1221, 1231, 1233, 1236, 1244, 1249, 1259, 1264, 1276, 1282, 1292, 1298, 1308] \ No newline at end of file +[4, 1, 159, 1303, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 1, 0, 5, 0, 174, 8, 0, 10, 0, 12, 0, 177, 9, 0, 1, 0, 1, 0, 1, 1, 1, 1, 3, 1, 183, 8, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 192, 8, 3, 1, 4, 1, 4, 1, 4, 5, 4, 197, 8, 4, 10, 4, 12, 4, 200, 9, 4, 1, 4, 3, 4, 203, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 217, 8, 5, 1, 6, 1, 6, 3, 6, 221, 8, 6, 1, 6, 3, 6, 224, 8, 6, 1, 7, 1, 7, 3, 7, 228, 8, 7, 1, 7, 3, 7, 231, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 238, 8, 8, 1, 8, 1, 8, 3, 8, 242, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 5, 9, 249, 8, 9, 10, 9, 12, 9, 252, 9, 9, 1, 9, 1, 9, 3, 9, 256, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 265, 8, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 273, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 280, 8, 12, 1, 12, 1, 12, 3, 12, 284, 8, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 290, 8, 12, 1, 12, 1, 12, 1, 12, 3, 12, 295, 8, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 303, 8, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 310, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 316, 8, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 3, 16, 328, 8, 16, 1, 17, 1, 17, 1, 18, 1, 18, 5, 18, 334, 8, 18, 10, 18, 12, 18, 337, 9, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 5, 20, 348, 8, 20, 10, 20, 12, 20, 351, 9, 20, 1, 20, 3, 20, 354, 8, 20, 1, 21, 1, 21, 1, 21, 3, 21, 359, 8, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 367, 8, 22, 10, 22, 12, 22, 370, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 378, 8, 23, 1, 24, 3, 24, 381, 8, 24, 1, 24, 1, 24, 3, 24, 385, 8, 
24, 1, 24, 3, 24, 388, 8, 24, 1, 24, 1, 24, 3, 24, 392, 8, 24, 1, 24, 3, 24, 395, 8, 24, 1, 24, 3, 24, 398, 8, 24, 1, 24, 3, 24, 401, 8, 24, 1, 24, 3, 24, 404, 8, 24, 1, 24, 1, 24, 3, 24, 408, 8, 24, 1, 24, 1, 24, 3, 24, 412, 8, 24, 1, 24, 3, 24, 415, 8, 24, 1, 24, 3, 24, 418, 8, 24, 1, 24, 3, 24, 421, 8, 24, 1, 24, 1, 24, 3, 24, 425, 8, 24, 1, 24, 3, 24, 428, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 437, 8, 26, 1, 27, 1, 27, 1, 27, 1, 28, 3, 28, 443, 8, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 462, 8, 29, 10, 29, 12, 29, 465, 9, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 481, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 498, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 504, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 510, 8, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 521, 8, 36, 3, 36, 523, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 534, 8, 39, 1, 39, 3, 39, 537, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 543, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 551, 8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 557, 8, 39, 10, 39, 12, 39, 560, 9, 39, 1, 40, 3, 40, 563, 8, 40, 1, 40, 1, 40, 1, 40, 3, 40, 568, 8, 40, 1, 40, 3, 40, 571, 8, 40, 1, 40, 3, 40, 574, 8, 40, 1, 40, 1, 40, 3, 40, 578, 8, 40, 1, 40, 1, 40, 3, 40, 582, 8, 40, 1, 40, 3, 40, 585, 8, 40, 3, 40, 587, 8, 40, 1, 40, 3, 40, 590, 8, 40, 1, 40, 1, 40, 3, 40, 594, 8, 40, 1, 40, 1, 40, 3, 40, 598, 8, 40, 1, 40, 3, 40, 601, 8, 40, 3, 40, 603, 8, 40, 3, 40, 605, 8, 40, 1, 41, 1, 41, 1, 41, 3, 41, 610, 8, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 621, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 3, 43, 627, 8, 43, 1, 44, 1, 44, 1, 44, 5, 44, 632, 8, 44, 10, 44, 12, 44, 635, 9, 44, 1, 45, 1, 45, 3, 45, 639, 8, 45, 1, 45, 1, 45, 3, 45, 643, 8, 45, 1, 45, 1, 45, 3, 45, 647, 8, 45, 1, 46, 1, 46, 1, 46, 1, 46, 3, 46, 653, 8, 46, 3, 46, 655, 8, 46, 1, 47, 1, 47, 1, 47, 5, 47, 660, 8, 47, 10, 47, 12, 47, 663, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 3, 49, 670, 8, 49, 1, 49, 3, 49, 673, 8, 49, 1, 49, 3, 49, 676, 8, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 695, 8, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 3, 54, 709, 8, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 723, 8, 56, 10, 56, 12, 56, 726, 9, 56, 1, 56, 3, 56, 729, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 738, 8, 56, 10, 56, 12, 56, 741, 9, 56, 1, 56, 3, 56, 744, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 753, 8, 56, 10, 56, 12, 56, 756, 9, 56, 1, 56, 3, 56, 759, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 766, 8, 56, 1, 56, 1, 56, 3, 56, 770, 8, 56, 1, 57, 1, 57, 1, 57, 5, 57, 775, 8, 57, 10, 57, 12, 57, 778, 9, 57, 1, 57, 3, 57, 781, 8, 57, 1, 58, 1, 58, 1, 58, 3, 58, 786, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 4, 58, 793, 8, 58, 11, 58, 12, 58, 794, 1, 58, 1, 58, 3, 58, 799, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 823, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 
1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 840, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 846, 8, 58, 1, 58, 3, 58, 849, 8, 58, 1, 58, 3, 58, 852, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 862, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 868, 8, 58, 1, 58, 3, 58, 871, 8, 58, 1, 58, 3, 58, 874, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 882, 8, 58, 1, 58, 3, 58, 885, 8, 58, 1, 58, 1, 58, 3, 58, 889, 8, 58, 1, 58, 3, 58, 892, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 906, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 923, 8, 58, 1, 58, 1, 58, 1, 58, 3, 58, 928, 8, 58, 1, 58, 1, 58, 1, 58, 3, 58, 933, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 939, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 946, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 958, 8, 58, 1, 58, 1, 58, 3, 58, 962, 8, 58, 1, 58, 3, 58, 965, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 974, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 988, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1004, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1033, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 1041, 8, 58, 5, 58, 1043, 8, 58, 10, 58, 12, 58, 1046, 9, 58, 1, 59, 1, 59, 1, 59, 1, 59, 5, 59, 1052, 8, 59, 10, 59, 12, 59, 1055, 9, 59, 1, 59, 3, 59, 1058, 8, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 5, 59, 1065, 8, 59, 10, 59, 12, 59, 1068, 9, 59, 1, 59, 3, 59, 1071, 8, 59, 1, 59, 1, 59, 3, 59, 1075, 8, 59, 1, 59, 1, 59, 1, 59, 3, 59, 1080, 8, 59, 1, 60, 1, 60, 1, 60, 5, 60, 1085, 8, 60, 10, 60, 12, 60, 1088, 9, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 5, 60, 1096, 8, 60, 10, 60, 12, 60, 1099, 9, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 3, 60, 1107, 8, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 3, 60, 1114, 8, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 1127, 8, 61, 1, 62, 1, 62, 1, 62, 5, 62, 1132, 8, 62, 10, 62, 12, 62, 1135, 9, 62, 1, 62, 3, 62, 1138, 8, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 3, 63, 1150, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 3, 64, 1156, 8, 64, 1, 64, 3, 64, 1159, 8, 64, 1, 65, 1, 65, 1, 65, 5, 65, 1164, 8, 65, 10, 65, 12, 65, 1167, 9, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 1178, 8, 66, 1, 66, 1, 66, 1, 66, 1, 66, 3, 66, 1184, 8, 66, 5, 66, 1186, 8, 66, 10, 66, 12, 66, 1189, 9, 66, 1, 67, 1, 67, 1, 67, 3, 67, 1194, 8, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 3, 68, 1201, 8, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 5, 69, 1208, 8, 69, 10, 69, 12, 69, 1211, 9, 69, 1, 69, 3, 69, 1214, 8, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 3, 71, 1224, 8, 71, 3, 71, 1226, 8, 71, 1, 72, 3, 72, 1229, 8, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 3, 72, 1237, 8, 72, 1, 73, 1, 73, 1, 73, 3, 73, 1242, 8, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 3, 77, 1252, 8, 77, 1, 78, 1, 78, 1, 78, 3, 78, 1257, 8, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 3, 81, 1269, 8, 81, 1, 82, 1, 82, 5, 82, 1273, 8, 82, 10, 82, 12, 82, 1276, 9, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 
1, 83, 1, 83, 3, 83, 1285, 8, 83, 1, 84, 1, 84, 5, 84, 1289, 8, 84, 10, 84, 12, 84, 1292, 9, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 3, 85, 1301, 8, 85, 1, 85, 0, 3, 78, 116, 132, 86, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 166, 168, 170, 0, 16, 2, 0, 18, 18, 74, 74, 2, 0, 44, 44, 51, 51, 3, 0, 1, 1, 4, 4, 8, 8, 4, 0, 1, 1, 3, 4, 8, 8, 80, 80, 2, 0, 51, 51, 73, 73, 2, 0, 1, 1, 4, 4, 2, 0, 7, 7, 22, 23, 2, 0, 30, 30, 49, 49, 2, 0, 71, 71, 76, 76, 3, 0, 10, 10, 50, 50, 90, 90, 2, 0, 41, 41, 53, 53, 1, 0, 107, 108, 2, 0, 118, 118, 139, 139, 7, 0, 21, 21, 38, 38, 55, 56, 70, 70, 78, 78, 97, 97, 103, 103, 16, 0, 1, 13, 15, 20, 22, 28, 30, 30, 32, 37, 39, 42, 44, 51, 53, 54, 58, 58, 60, 69, 71, 77, 79, 83, 85, 92, 94, 96, 98, 99, 101, 102, 4, 0, 20, 20, 30, 30, 39, 39, 48, 48, 1475, 0, 175, 1, 0, 0, 0, 2, 182, 1, 0, 0, 0, 4, 184, 1, 0, 0, 0, 6, 186, 1, 0, 0, 0, 8, 193, 1, 0, 0, 0, 10, 216, 1, 0, 0, 0, 12, 218, 1, 0, 0, 0, 14, 225, 1, 0, 0, 0, 16, 232, 1, 0, 0, 0, 18, 245, 1, 0, 0, 0, 20, 257, 1, 0, 0, 0, 22, 266, 1, 0, 0, 0, 24, 274, 1, 0, 0, 0, 26, 296, 1, 0, 0, 0, 28, 311, 1, 0, 0, 0, 30, 320, 1, 0, 0, 0, 32, 325, 1, 0, 0, 0, 34, 329, 1, 0, 0, 0, 36, 331, 1, 0, 0, 0, 38, 340, 1, 0, 0, 0, 40, 344, 1, 0, 0, 0, 42, 358, 1, 0, 0, 0, 44, 362, 1, 0, 0, 0, 46, 377, 1, 0, 0, 0, 48, 380, 1, 0, 0, 0, 50, 429, 1, 0, 0, 0, 52, 432, 1, 0, 0, 0, 54, 438, 1, 0, 0, 0, 56, 442, 1, 0, 0, 0, 58, 448, 1, 0, 0, 0, 60, 466, 1, 0, 0, 0, 62, 469, 1, 0, 0, 0, 64, 472, 1, 0, 0, 0, 66, 482, 1, 0, 0, 0, 68, 485, 1, 0, 0, 0, 70, 489, 1, 0, 0, 0, 72, 522, 1, 0, 0, 0, 74, 524, 1, 0, 0, 0, 76, 527, 1, 0, 0, 0, 78, 542, 1, 0, 0, 0, 80, 604, 1, 0, 0, 0, 82, 609, 1, 0, 0, 0, 84, 620, 1, 0, 0, 0, 86, 622, 1, 0, 0, 0, 88, 628, 1, 0, 0, 0, 90, 636, 1, 0, 0, 0, 92, 654, 1, 0, 0, 0, 94, 656, 1, 0, 0, 0, 96, 664, 1, 0, 0, 0, 98, 669, 1, 0, 0, 0, 100, 677, 1, 0, 0, 0, 102, 681, 1, 0, 0, 0, 104, 685, 1, 0, 0, 0, 106, 694, 1, 0, 0, 0, 108, 708, 1, 0, 0, 0, 110, 710, 1, 0, 0, 0, 112, 769, 1, 0, 0, 0, 114, 771, 1, 0, 0, 0, 116, 932, 1, 0, 0, 0, 118, 1074, 1, 0, 0, 0, 120, 1113, 1, 0, 0, 0, 122, 1126, 1, 0, 0, 0, 124, 1128, 1, 0, 0, 0, 126, 1149, 1, 0, 0, 0, 128, 1158, 1, 0, 0, 0, 130, 1160, 1, 0, 0, 0, 132, 1177, 1, 0, 0, 0, 134, 1190, 1, 0, 0, 0, 136, 1200, 1, 0, 0, 0, 138, 1204, 1, 0, 0, 0, 140, 1215, 1, 0, 0, 0, 142, 1225, 1, 0, 0, 0, 144, 1228, 1, 0, 0, 0, 146, 1241, 1, 0, 0, 0, 148, 1243, 1, 0, 0, 0, 150, 1245, 1, 0, 0, 0, 152, 1247, 1, 0, 0, 0, 154, 1251, 1, 0, 0, 0, 156, 1256, 1, 0, 0, 0, 158, 1258, 1, 0, 0, 0, 160, 1262, 1, 0, 0, 0, 162, 1268, 1, 0, 0, 0, 164, 1270, 1, 0, 0, 0, 166, 1284, 1, 0, 0, 0, 168, 1286, 1, 0, 0, 0, 170, 1300, 1, 0, 0, 0, 172, 174, 3, 2, 1, 0, 173, 172, 1, 0, 0, 0, 174, 177, 1, 0, 0, 0, 175, 173, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 178, 1, 0, 0, 0, 177, 175, 1, 0, 0, 0, 178, 179, 5, 0, 0, 1, 179, 1, 1, 0, 0, 0, 180, 183, 3, 6, 3, 0, 181, 183, 3, 10, 5, 0, 182, 180, 1, 0, 0, 0, 182, 181, 1, 0, 0, 0, 183, 3, 1, 0, 0, 0, 184, 185, 3, 116, 58, 0, 185, 5, 1, 0, 0, 0, 186, 187, 5, 52, 0, 0, 187, 191, 3, 156, 78, 0, 188, 189, 5, 115, 0, 0, 189, 190, 5, 122, 0, 0, 190, 192, 3, 4, 2, 0, 191, 188, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 7, 1, 0, 0, 0, 193, 198, 3, 156, 78, 0, 194, 195, 5, 
116, 0, 0, 195, 197, 3, 156, 78, 0, 196, 194, 1, 0, 0, 0, 197, 200, 1, 0, 0, 0, 198, 196, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 202, 1, 0, 0, 0, 200, 198, 1, 0, 0, 0, 201, 203, 5, 116, 0, 0, 202, 201, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 9, 1, 0, 0, 0, 204, 217, 3, 12, 6, 0, 205, 217, 3, 14, 7, 0, 206, 217, 3, 18, 9, 0, 207, 217, 3, 20, 10, 0, 208, 217, 3, 22, 11, 0, 209, 217, 3, 26, 13, 0, 210, 217, 3, 24, 12, 0, 211, 217, 3, 28, 14, 0, 212, 217, 3, 30, 15, 0, 213, 217, 3, 36, 18, 0, 214, 217, 3, 32, 16, 0, 215, 217, 3, 34, 17, 0, 216, 204, 1, 0, 0, 0, 216, 205, 1, 0, 0, 0, 216, 206, 1, 0, 0, 0, 216, 207, 1, 0, 0, 0, 216, 208, 1, 0, 0, 0, 216, 209, 1, 0, 0, 0, 216, 210, 1, 0, 0, 0, 216, 211, 1, 0, 0, 0, 216, 212, 1, 0, 0, 0, 216, 213, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 215, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 220, 5, 72, 0, 0, 219, 221, 3, 4, 2, 0, 220, 219, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 223, 1, 0, 0, 0, 222, 224, 5, 150, 0, 0, 223, 222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 13, 1, 0, 0, 0, 225, 227, 5, 84, 0, 0, 226, 228, 3, 4, 2, 0, 227, 226, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 230, 1, 0, 0, 0, 229, 231, 5, 150, 0, 0, 230, 229, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 15, 1, 0, 0, 0, 232, 241, 5, 14, 0, 0, 233, 234, 5, 130, 0, 0, 234, 237, 3, 156, 78, 0, 235, 236, 5, 115, 0, 0, 236, 238, 3, 156, 78, 0, 237, 235, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 5, 149, 0, 0, 240, 242, 1, 0, 0, 0, 241, 233, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 3, 36, 18, 0, 244, 17, 1, 0, 0, 0, 245, 246, 5, 93, 0, 0, 246, 250, 3, 36, 18, 0, 247, 249, 3, 16, 8, 0, 248, 247, 1, 0, 0, 0, 249, 252, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 255, 1, 0, 0, 0, 252, 250, 1, 0, 0, 0, 253, 254, 5, 29, 0, 0, 254, 256, 3, 36, 18, 0, 255, 253, 1, 0, 0, 0, 255, 256, 1, 0, 0, 0, 256, 19, 1, 0, 0, 0, 257, 258, 5, 40, 0, 0, 258, 259, 5, 130, 0, 0, 259, 260, 3, 4, 2, 0, 260, 261, 5, 149, 0, 0, 261, 264, 3, 10, 5, 0, 262, 263, 5, 25, 0, 0, 263, 265, 3, 10, 5, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 21, 1, 0, 0, 0, 266, 267, 5, 100, 0, 0, 267, 268, 5, 130, 0, 0, 268, 269, 3, 4, 2, 0, 269, 270, 5, 149, 0, 0, 270, 272, 3, 10, 5, 0, 271, 273, 5, 150, 0, 0, 272, 271, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 23, 1, 0, 0, 0, 274, 275, 5, 33, 0, 0, 275, 279, 5, 130, 0, 0, 276, 280, 3, 6, 3, 0, 277, 280, 3, 30, 15, 0, 278, 280, 3, 4, 2, 0, 279, 276, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 278, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 283, 5, 150, 0, 0, 282, 284, 3, 4, 2, 0, 283, 282, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 289, 5, 150, 0, 0, 286, 290, 3, 6, 3, 0, 287, 290, 3, 30, 15, 0, 288, 290, 3, 4, 2, 0, 289, 286, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 288, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 292, 5, 149, 0, 0, 292, 294, 3, 10, 5, 0, 293, 295, 5, 150, 0, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 25, 1, 0, 0, 0, 296, 297, 5, 33, 0, 0, 297, 298, 5, 130, 0, 0, 298, 299, 5, 52, 0, 0, 299, 302, 3, 156, 78, 0, 300, 301, 5, 116, 0, 0, 301, 303, 3, 156, 78, 0, 302, 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 5, 42, 0, 0, 305, 306, 3, 4, 2, 0, 306, 307, 5, 149, 0, 0, 307, 309, 3, 10, 5, 0, 308, 310, 5, 150, 0, 0, 309, 308, 1, 0, 0, 0, 309, 310, 1, 0, 0, 0, 310, 27, 1, 0, 0, 0, 311, 312, 5, 31, 0, 0, 312, 313, 3, 156, 78, 0, 313, 315, 5, 130, 0, 0, 314, 316, 3, 8, 4, 0, 315, 314, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 1, 0, 0, 0, 317, 318, 
5, 149, 0, 0, 318, 319, 3, 36, 18, 0, 319, 29, 1, 0, 0, 0, 320, 321, 3, 4, 2, 0, 321, 322, 5, 115, 0, 0, 322, 323, 5, 122, 0, 0, 323, 324, 3, 4, 2, 0, 324, 31, 1, 0, 0, 0, 325, 327, 3, 4, 2, 0, 326, 328, 5, 150, 0, 0, 327, 326, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 33, 1, 0, 0, 0, 329, 330, 5, 150, 0, 0, 330, 35, 1, 0, 0, 0, 331, 335, 5, 128, 0, 0, 332, 334, 3, 2, 1, 0, 333, 332, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 333, 1, 0, 0, 0, 335, 336, 1, 0, 0, 0, 336, 338, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 338, 339, 5, 147, 0, 0, 339, 37, 1, 0, 0, 0, 340, 341, 3, 4, 2, 0, 341, 342, 5, 115, 0, 0, 342, 343, 3, 4, 2, 0, 343, 39, 1, 0, 0, 0, 344, 349, 3, 38, 19, 0, 345, 346, 5, 116, 0, 0, 346, 348, 3, 38, 19, 0, 347, 345, 1, 0, 0, 0, 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 349, 350, 1, 0, 0, 0, 350, 353, 1, 0, 0, 0, 351, 349, 1, 0, 0, 0, 352, 354, 5, 116, 0, 0, 353, 352, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 41, 1, 0, 0, 0, 355, 359, 3, 44, 22, 0, 356, 359, 3, 48, 24, 0, 357, 359, 3, 120, 60, 0, 358, 355, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 357, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 361, 5, 0, 0, 1, 361, 43, 1, 0, 0, 0, 362, 368, 3, 46, 23, 0, 363, 364, 5, 95, 0, 0, 364, 365, 5, 1, 0, 0, 365, 367, 3, 46, 23, 0, 366, 363, 1, 0, 0, 0, 367, 370, 1, 0, 0, 0, 368, 366, 1, 0, 0, 0, 368, 369, 1, 0, 0, 0, 369, 45, 1, 0, 0, 0, 370, 368, 1, 0, 0, 0, 371, 378, 3, 48, 24, 0, 372, 373, 5, 130, 0, 0, 373, 374, 3, 44, 22, 0, 374, 375, 5, 149, 0, 0, 375, 378, 1, 0, 0, 0, 376, 378, 3, 160, 80, 0, 377, 371, 1, 0, 0, 0, 377, 372, 1, 0, 0, 0, 377, 376, 1, 0, 0, 0, 378, 47, 1, 0, 0, 0, 379, 381, 3, 50, 25, 0, 380, 379, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 382, 1, 0, 0, 0, 382, 384, 5, 79, 0, 0, 383, 385, 5, 24, 0, 0, 384, 383, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 387, 1, 0, 0, 0, 386, 388, 3, 52, 26, 0, 387, 386, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 391, 3, 114, 57, 0, 390, 392, 3, 54, 27, 0, 391, 390, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 395, 3, 56, 28, 0, 394, 393, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 397, 1, 0, 0, 0, 396, 398, 3, 60, 30, 0, 397, 396, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 400, 1, 0, 0, 0, 399, 401, 3, 62, 31, 0, 400, 399, 1, 0, 0, 0, 400, 401, 1, 0, 0, 0, 401, 403, 1, 0, 0, 0, 402, 404, 3, 64, 32, 0, 403, 402, 1, 0, 0, 0, 403, 404, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 406, 5, 102, 0, 0, 406, 408, 7, 0, 0, 0, 407, 405, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 410, 5, 102, 0, 0, 410, 412, 5, 89, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 414, 1, 0, 0, 0, 413, 415, 3, 66, 33, 0, 414, 413, 1, 0, 0, 0, 414, 415, 1, 0, 0, 0, 415, 417, 1, 0, 0, 0, 416, 418, 3, 58, 29, 0, 417, 416, 1, 0, 0, 0, 417, 418, 1, 0, 0, 0, 418, 420, 1, 0, 0, 0, 419, 421, 3, 68, 34, 0, 420, 419, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 424, 1, 0, 0, 0, 422, 425, 3, 72, 36, 0, 423, 425, 3, 74, 37, 0, 424, 422, 1, 0, 0, 0, 424, 423, 1, 0, 0, 0, 424, 425, 1, 0, 0, 0, 425, 427, 1, 0, 0, 0, 426, 428, 3, 76, 38, 0, 427, 426, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 49, 1, 0, 0, 0, 429, 430, 5, 102, 0, 0, 430, 431, 3, 124, 62, 0, 431, 51, 1, 0, 0, 0, 432, 433, 5, 88, 0, 0, 433, 436, 5, 108, 0, 0, 434, 435, 5, 102, 0, 0, 435, 437, 5, 85, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 53, 1, 0, 0, 0, 438, 439, 5, 34, 0, 0, 439, 440, 3, 78, 39, 0, 440, 55, 1, 0, 0, 0, 441, 443, 7, 1, 0, 0, 442, 441, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 5, 5, 0, 0, 445, 446, 5, 47, 0, 0, 446, 447, 3, 114, 57, 0, 447, 57, 1, 0, 0, 0, 
448, 449, 5, 101, 0, 0, 449, 450, 3, 156, 78, 0, 450, 451, 5, 6, 0, 0, 451, 452, 5, 130, 0, 0, 452, 453, 3, 98, 49, 0, 453, 463, 5, 149, 0, 0, 454, 455, 5, 116, 0, 0, 455, 456, 3, 156, 78, 0, 456, 457, 5, 6, 0, 0, 457, 458, 5, 130, 0, 0, 458, 459, 3, 98, 49, 0, 459, 460, 5, 149, 0, 0, 460, 462, 1, 0, 0, 0, 461, 454, 1, 0, 0, 0, 462, 465, 1, 0, 0, 0, 463, 461, 1, 0, 0, 0, 463, 464, 1, 0, 0, 0, 464, 59, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 466, 467, 5, 69, 0, 0, 467, 468, 3, 116, 58, 0, 468, 61, 1, 0, 0, 0, 469, 470, 5, 99, 0, 0, 470, 471, 3, 116, 58, 0, 471, 63, 1, 0, 0, 0, 472, 473, 5, 36, 0, 0, 473, 480, 5, 11, 0, 0, 474, 475, 7, 0, 0, 0, 475, 476, 5, 130, 0, 0, 476, 477, 3, 114, 57, 0, 477, 478, 5, 149, 0, 0, 478, 481, 1, 0, 0, 0, 479, 481, 3, 114, 57, 0, 480, 474, 1, 0, 0, 0, 480, 479, 1, 0, 0, 0, 481, 65, 1, 0, 0, 0, 482, 483, 5, 37, 0, 0, 483, 484, 3, 116, 58, 0, 484, 67, 1, 0, 0, 0, 485, 486, 5, 64, 0, 0, 486, 487, 5, 11, 0, 0, 487, 488, 3, 88, 44, 0, 488, 69, 1, 0, 0, 0, 489, 490, 5, 64, 0, 0, 490, 491, 5, 11, 0, 0, 491, 492, 3, 114, 57, 0, 492, 71, 1, 0, 0, 0, 493, 494, 5, 54, 0, 0, 494, 497, 3, 116, 58, 0, 495, 496, 5, 116, 0, 0, 496, 498, 3, 116, 58, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 503, 1, 0, 0, 0, 499, 500, 5, 102, 0, 0, 500, 504, 5, 85, 0, 0, 501, 502, 5, 11, 0, 0, 502, 504, 3, 114, 57, 0, 503, 499, 1, 0, 0, 0, 503, 501, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 523, 1, 0, 0, 0, 505, 506, 5, 54, 0, 0, 506, 509, 3, 116, 58, 0, 507, 508, 5, 102, 0, 0, 508, 510, 5, 85, 0, 0, 509, 507, 1, 0, 0, 0, 509, 510, 1, 0, 0, 0, 510, 511, 1, 0, 0, 0, 511, 512, 5, 61, 0, 0, 512, 513, 3, 116, 58, 0, 513, 523, 1, 0, 0, 0, 514, 515, 5, 54, 0, 0, 515, 516, 3, 116, 58, 0, 516, 517, 5, 61, 0, 0, 517, 520, 3, 116, 58, 0, 518, 519, 5, 11, 0, 0, 519, 521, 3, 114, 57, 0, 520, 518, 1, 0, 0, 0, 520, 521, 1, 0, 0, 0, 521, 523, 1, 0, 0, 0, 522, 493, 1, 0, 0, 0, 522, 505, 1, 0, 0, 0, 522, 514, 1, 0, 0, 0, 523, 73, 1, 0, 0, 0, 524, 525, 5, 61, 0, 0, 525, 526, 3, 116, 58, 0, 526, 75, 1, 0, 0, 0, 527, 528, 5, 81, 0, 0, 528, 529, 3, 94, 47, 0, 529, 77, 1, 0, 0, 0, 530, 531, 6, 39, -1, 0, 531, 533, 3, 132, 66, 0, 532, 534, 5, 28, 0, 0, 533, 532, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 536, 1, 0, 0, 0, 535, 537, 3, 86, 43, 0, 536, 535, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, 537, 543, 1, 0, 0, 0, 538, 539, 5, 130, 0, 0, 539, 540, 3, 78, 39, 0, 540, 541, 5, 149, 0, 0, 541, 543, 1, 0, 0, 0, 542, 530, 1, 0, 0, 0, 542, 538, 1, 0, 0, 0, 543, 558, 1, 0, 0, 0, 544, 545, 10, 3, 0, 0, 545, 546, 3, 82, 41, 0, 546, 547, 3, 78, 39, 4, 547, 557, 1, 0, 0, 0, 548, 550, 10, 4, 0, 0, 549, 551, 3, 80, 40, 0, 550, 549, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 5, 47, 0, 0, 553, 554, 3, 78, 39, 0, 554, 555, 3, 84, 42, 0, 555, 557, 1, 0, 0, 0, 556, 544, 1, 0, 0, 0, 556, 548, 1, 0, 0, 0, 557, 560, 1, 0, 0, 0, 558, 556, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 79, 1, 0, 0, 0, 560, 558, 1, 0, 0, 0, 561, 563, 7, 2, 0, 0, 562, 561, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 571, 5, 44, 0, 0, 565, 567, 5, 44, 0, 0, 566, 568, 7, 2, 0, 0, 567, 566, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 571, 1, 0, 0, 0, 569, 571, 7, 2, 0, 0, 570, 562, 1, 0, 0, 0, 570, 565, 1, 0, 0, 0, 570, 569, 1, 0, 0, 0, 571, 605, 1, 0, 0, 0, 572, 574, 7, 3, 0, 0, 573, 572, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 577, 7, 4, 0, 0, 576, 578, 5, 65, 0, 0, 577, 576, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 587, 1, 0, 0, 0, 579, 581, 7, 4, 0, 0, 580, 582, 5, 65, 0, 0, 581, 580, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 
582, 584, 1, 0, 0, 0, 583, 585, 7, 3, 0, 0, 584, 583, 1, 0, 0, 0, 584, 585, 1, 0, 0, 0, 585, 587, 1, 0, 0, 0, 586, 573, 1, 0, 0, 0, 586, 579, 1, 0, 0, 0, 587, 605, 1, 0, 0, 0, 588, 590, 7, 5, 0, 0, 589, 588, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 1, 0, 0, 0, 591, 593, 5, 35, 0, 0, 592, 594, 5, 65, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 603, 1, 0, 0, 0, 595, 597, 5, 35, 0, 0, 596, 598, 5, 65, 0, 0, 597, 596, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 600, 1, 0, 0, 0, 599, 601, 7, 5, 0, 0, 600, 599, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 603, 1, 0, 0, 0, 602, 589, 1, 0, 0, 0, 602, 595, 1, 0, 0, 0, 603, 605, 1, 0, 0, 0, 604, 570, 1, 0, 0, 0, 604, 586, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 81, 1, 0, 0, 0, 606, 607, 5, 17, 0, 0, 607, 610, 5, 47, 0, 0, 608, 610, 5, 116, 0, 0, 609, 606, 1, 0, 0, 0, 609, 608, 1, 0, 0, 0, 610, 83, 1, 0, 0, 0, 611, 612, 5, 62, 0, 0, 612, 621, 3, 114, 57, 0, 613, 614, 5, 96, 0, 0, 614, 615, 5, 130, 0, 0, 615, 616, 3, 114, 57, 0, 616, 617, 5, 149, 0, 0, 617, 621, 1, 0, 0, 0, 618, 619, 5, 96, 0, 0, 619, 621, 3, 114, 57, 0, 620, 611, 1, 0, 0, 0, 620, 613, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 621, 85, 1, 0, 0, 0, 622, 623, 5, 77, 0, 0, 623, 626, 3, 92, 46, 0, 624, 625, 5, 61, 0, 0, 625, 627, 3, 92, 46, 0, 626, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 87, 1, 0, 0, 0, 628, 633, 3, 90, 45, 0, 629, 630, 5, 116, 0, 0, 630, 632, 3, 90, 45, 0, 631, 629, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 89, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 636, 638, 3, 116, 58, 0, 637, 639, 7, 6, 0, 0, 638, 637, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 642, 1, 0, 0, 0, 640, 641, 5, 60, 0, 0, 641, 643, 7, 7, 0, 0, 642, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 646, 1, 0, 0, 0, 644, 645, 5, 16, 0, 0, 645, 647, 5, 110, 0, 0, 646, 644, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 91, 1, 0, 0, 0, 648, 655, 3, 160, 80, 0, 649, 652, 3, 144, 72, 0, 650, 651, 5, 151, 0, 0, 651, 653, 3, 144, 72, 0, 652, 650, 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 655, 1, 0, 0, 0, 654, 648, 1, 0, 0, 0, 654, 649, 1, 0, 0, 0, 655, 93, 1, 0, 0, 0, 656, 661, 3, 96, 48, 0, 657, 658, 5, 116, 0, 0, 658, 660, 3, 96, 48, 0, 659, 657, 1, 0, 0, 0, 660, 663, 1, 0, 0, 0, 661, 659, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 95, 1, 0, 0, 0, 663, 661, 1, 0, 0, 0, 664, 665, 3, 156, 78, 0, 665, 666, 5, 122, 0, 0, 666, 667, 3, 146, 73, 0, 667, 97, 1, 0, 0, 0, 668, 670, 3, 100, 50, 0, 669, 668, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 673, 3, 102, 51, 0, 672, 671, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 675, 1, 0, 0, 0, 674, 676, 3, 104, 52, 0, 675, 674, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 99, 1, 0, 0, 0, 677, 678, 5, 67, 0, 0, 678, 679, 5, 11, 0, 0, 679, 680, 3, 114, 57, 0, 680, 101, 1, 0, 0, 0, 681, 682, 5, 64, 0, 0, 682, 683, 5, 11, 0, 0, 683, 684, 3, 88, 44, 0, 684, 103, 1, 0, 0, 0, 685, 686, 7, 8, 0, 0, 686, 687, 3, 106, 53, 0, 687, 105, 1, 0, 0, 0, 688, 695, 3, 108, 54, 0, 689, 690, 5, 9, 0, 0, 690, 691, 3, 108, 54, 0, 691, 692, 5, 2, 0, 0, 692, 693, 3, 108, 54, 0, 693, 695, 1, 0, 0, 0, 694, 688, 1, 0, 0, 0, 694, 689, 1, 0, 0, 0, 695, 107, 1, 0, 0, 0, 696, 697, 5, 19, 0, 0, 697, 709, 5, 75, 0, 0, 698, 699, 5, 94, 0, 0, 699, 709, 5, 68, 0, 0, 700, 701, 5, 94, 0, 0, 701, 709, 5, 32, 0, 0, 702, 703, 3, 144, 72, 0, 703, 704, 5, 68, 0, 0, 704, 709, 1, 0, 0, 0, 705, 706, 3, 144, 72, 0, 706, 707, 5, 32, 0, 0, 707, 709, 1, 0, 0, 0, 708, 696, 1, 0, 0, 0, 708, 698, 1, 0, 0, 0, 708, 700, 1, 0, 0, 0, 708, 702, 1, 0, 0, 0, 708, 705, 1, 0, 0, 0, 709, 109, 1, 0, 0, 0, 710, 711, 3, 116, 
58, 0, 711, 712, 5, 0, 0, 1, 712, 111, 1, 0, 0, 0, 713, 770, 3, 156, 78, 0, 714, 715, 3, 156, 78, 0, 715, 716, 5, 130, 0, 0, 716, 717, 3, 156, 78, 0, 717, 724, 3, 112, 56, 0, 718, 719, 5, 116, 0, 0, 719, 720, 3, 156, 78, 0, 720, 721, 3, 112, 56, 0, 721, 723, 1, 0, 0, 0, 722, 718, 1, 0, 0, 0, 723, 726, 1, 0, 0, 0, 724, 722, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 728, 1, 0, 0, 0, 726, 724, 1, 0, 0, 0, 727, 729, 5, 116, 0, 0, 728, 727, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 730, 1, 0, 0, 0, 730, 731, 5, 149, 0, 0, 731, 770, 1, 0, 0, 0, 732, 733, 3, 156, 78, 0, 733, 734, 5, 130, 0, 0, 734, 739, 3, 158, 79, 0, 735, 736, 5, 116, 0, 0, 736, 738, 3, 158, 79, 0, 737, 735, 1, 0, 0, 0, 738, 741, 1, 0, 0, 0, 739, 737, 1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740, 743, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 742, 744, 5, 116, 0, 0, 743, 742, 1, 0, 0, 0, 743, 744, 1, 0, 0, 0, 744, 745, 1, 0, 0, 0, 745, 746, 5, 149, 0, 0, 746, 770, 1, 0, 0, 0, 747, 748, 3, 156, 78, 0, 748, 749, 5, 130, 0, 0, 749, 754, 3, 112, 56, 0, 750, 751, 5, 116, 0, 0, 751, 753, 3, 112, 56, 0, 752, 750, 1, 0, 0, 0, 753, 756, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 754, 755, 1, 0, 0, 0, 755, 758, 1, 0, 0, 0, 756, 754, 1, 0, 0, 0, 757, 759, 5, 116, 0, 0, 758, 757, 1, 0, 0, 0, 758, 759, 1, 0, 0, 0, 759, 760, 1, 0, 0, 0, 760, 761, 5, 149, 0, 0, 761, 770, 1, 0, 0, 0, 762, 763, 3, 156, 78, 0, 763, 765, 5, 130, 0, 0, 764, 766, 3, 114, 57, 0, 765, 764, 1, 0, 0, 0, 765, 766, 1, 0, 0, 0, 766, 767, 1, 0, 0, 0, 767, 768, 5, 149, 0, 0, 768, 770, 1, 0, 0, 0, 769, 713, 1, 0, 0, 0, 769, 714, 1, 0, 0, 0, 769, 732, 1, 0, 0, 0, 769, 747, 1, 0, 0, 0, 769, 762, 1, 0, 0, 0, 770, 113, 1, 0, 0, 0, 771, 776, 3, 116, 58, 0, 772, 773, 5, 116, 0, 0, 773, 775, 3, 116, 58, 0, 774, 772, 1, 0, 0, 0, 775, 778, 1, 0, 0, 0, 776, 774, 1, 0, 0, 0, 776, 777, 1, 0, 0, 0, 777, 780, 1, 0, 0, 0, 778, 776, 1, 0, 0, 0, 779, 781, 5, 116, 0, 0, 780, 779, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 115, 1, 0, 0, 0, 782, 783, 6, 58, -1, 0, 783, 785, 5, 12, 0, 0, 784, 786, 3, 116, 58, 0, 785, 784, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 792, 1, 0, 0, 0, 787, 788, 5, 98, 0, 0, 788, 789, 3, 116, 58, 0, 789, 790, 5, 83, 0, 0, 790, 791, 3, 116, 58, 0, 791, 793, 1, 0, 0, 0, 792, 787, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 792, 1, 0, 0, 0, 794, 795, 1, 0, 0, 0, 795, 798, 1, 0, 0, 0, 796, 797, 5, 25, 0, 0, 797, 799, 3, 116, 58, 0, 798, 796, 1, 0, 0, 0, 798, 799, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 801, 5, 26, 0, 0, 801, 933, 1, 0, 0, 0, 802, 803, 5, 13, 0, 0, 803, 804, 5, 130, 0, 0, 804, 805, 3, 116, 58, 0, 805, 806, 5, 6, 0, 0, 806, 807, 3, 112, 56, 0, 807, 808, 5, 149, 0, 0, 808, 933, 1, 0, 0, 0, 809, 810, 5, 20, 0, 0, 810, 933, 5, 110, 0, 0, 811, 812, 5, 45, 0, 0, 812, 813, 3, 116, 58, 0, 813, 814, 3, 148, 74, 0, 814, 933, 1, 0, 0, 0, 815, 816, 5, 82, 0, 0, 816, 817, 5, 130, 0, 0, 817, 818, 3, 116, 58, 0, 818, 819, 5, 34, 0, 0, 819, 822, 3, 116, 58, 0, 820, 821, 5, 33, 0, 0, 821, 823, 3, 116, 58, 0, 822, 820, 1, 0, 0, 0, 822, 823, 1, 0, 0, 0, 823, 824, 1, 0, 0, 0, 824, 825, 5, 149, 0, 0, 825, 933, 1, 0, 0, 0, 826, 827, 5, 86, 0, 0, 827, 933, 5, 110, 0, 0, 828, 829, 5, 91, 0, 0, 829, 830, 5, 130, 0, 0, 830, 831, 7, 9, 0, 0, 831, 832, 3, 162, 81, 0, 832, 833, 5, 34, 0, 0, 833, 834, 3, 116, 58, 0, 834, 835, 5, 149, 0, 0, 835, 933, 1, 0, 0, 0, 836, 837, 3, 156, 78, 0, 837, 839, 5, 130, 0, 0, 838, 840, 3, 114, 57, 0, 839, 838, 1, 0, 0, 0, 839, 840, 1, 0, 0, 0, 840, 841, 1, 0, 0, 0, 841, 842, 5, 149, 0, 0, 842, 851, 1, 0, 0, 0, 843, 845, 5, 130, 0, 0, 844, 846, 5, 24, 0, 0, 845, 844, 1, 0, 0, 0, 845, 846, 1, 0, 
0, 0, 846, 848, 1, 0, 0, 0, 847, 849, 3, 114, 57, 0, 848, 847, 1, 0, 0, 0, 848, 849, 1, 0, 0, 0, 849, 850, 1, 0, 0, 0, 850, 852, 5, 149, 0, 0, 851, 843, 1, 0, 0, 0, 851, 852, 1, 0, 0, 0, 852, 853, 1, 0, 0, 0, 853, 854, 5, 66, 0, 0, 854, 855, 5, 130, 0, 0, 855, 856, 3, 98, 49, 0, 856, 857, 5, 149, 0, 0, 857, 933, 1, 0, 0, 0, 858, 859, 3, 156, 78, 0, 859, 861, 5, 130, 0, 0, 860, 862, 3, 114, 57, 0, 861, 860, 1, 0, 0, 0, 861, 862, 1, 0, 0, 0, 862, 863, 1, 0, 0, 0, 863, 864, 5, 149, 0, 0, 864, 873, 1, 0, 0, 0, 865, 867, 5, 130, 0, 0, 866, 868, 5, 24, 0, 0, 867, 866, 1, 0, 0, 0, 867, 868, 1, 0, 0, 0, 868, 870, 1, 0, 0, 0, 869, 871, 3, 114, 57, 0, 870, 869, 1, 0, 0, 0, 870, 871, 1, 0, 0, 0, 871, 872, 1, 0, 0, 0, 872, 874, 5, 149, 0, 0, 873, 865, 1, 0, 0, 0, 873, 874, 1, 0, 0, 0, 874, 875, 1, 0, 0, 0, 875, 876, 5, 66, 0, 0, 876, 877, 3, 156, 78, 0, 877, 933, 1, 0, 0, 0, 878, 884, 3, 156, 78, 0, 879, 881, 5, 130, 0, 0, 880, 882, 3, 114, 57, 0, 881, 880, 1, 0, 0, 0, 881, 882, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 885, 5, 149, 0, 0, 884, 879, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 888, 5, 130, 0, 0, 887, 889, 5, 24, 0, 0, 888, 887, 1, 0, 0, 0, 888, 889, 1, 0, 0, 0, 889, 891, 1, 0, 0, 0, 890, 892, 3, 114, 57, 0, 891, 890, 1, 0, 0, 0, 891, 892, 1, 0, 0, 0, 892, 893, 1, 0, 0, 0, 893, 894, 5, 149, 0, 0, 894, 933, 1, 0, 0, 0, 895, 933, 3, 120, 60, 0, 896, 933, 3, 164, 82, 0, 897, 933, 3, 146, 73, 0, 898, 899, 5, 118, 0, 0, 899, 933, 3, 116, 58, 20, 900, 901, 5, 58, 0, 0, 901, 933, 3, 116, 58, 14, 902, 903, 3, 136, 68, 0, 903, 904, 5, 120, 0, 0, 904, 906, 1, 0, 0, 0, 905, 902, 1, 0, 0, 0, 905, 906, 1, 0, 0, 0, 906, 907, 1, 0, 0, 0, 907, 933, 5, 112, 0, 0, 908, 909, 5, 130, 0, 0, 909, 910, 3, 44, 22, 0, 910, 911, 5, 149, 0, 0, 911, 933, 1, 0, 0, 0, 912, 913, 5, 130, 0, 0, 913, 914, 3, 116, 58, 0, 914, 915, 5, 149, 0, 0, 915, 933, 1, 0, 0, 0, 916, 917, 5, 130, 0, 0, 917, 918, 3, 114, 57, 0, 918, 919, 5, 149, 0, 0, 919, 933, 1, 0, 0, 0, 920, 922, 5, 129, 0, 0, 921, 923, 3, 114, 57, 0, 922, 921, 1, 0, 0, 0, 922, 923, 1, 0, 0, 0, 923, 924, 1, 0, 0, 0, 924, 933, 5, 148, 0, 0, 925, 927, 5, 128, 0, 0, 926, 928, 3, 40, 20, 0, 927, 926, 1, 0, 0, 0, 927, 928, 1, 0, 0, 0, 928, 929, 1, 0, 0, 0, 929, 933, 5, 147, 0, 0, 930, 933, 3, 118, 59, 0, 931, 933, 3, 128, 64, 0, 932, 782, 1, 0, 0, 0, 932, 802, 1, 0, 0, 0, 932, 809, 1, 0, 0, 0, 932, 811, 1, 0, 0, 0, 932, 815, 1, 0, 0, 0, 932, 826, 1, 0, 0, 0, 932, 828, 1, 0, 0, 0, 932, 836, 1, 0, 0, 0, 932, 858, 1, 0, 0, 0, 932, 878, 1, 0, 0, 0, 932, 895, 1, 0, 0, 0, 932, 896, 1, 0, 0, 0, 932, 897, 1, 0, 0, 0, 932, 898, 1, 0, 0, 0, 932, 900, 1, 0, 0, 0, 932, 905, 1, 0, 0, 0, 932, 908, 1, 0, 0, 0, 932, 912, 1, 0, 0, 0, 932, 916, 1, 0, 0, 0, 932, 920, 1, 0, 0, 0, 932, 925, 1, 0, 0, 0, 932, 930, 1, 0, 0, 0, 932, 931, 1, 0, 0, 0, 933, 1044, 1, 0, 0, 0, 934, 938, 10, 19, 0, 0, 935, 939, 5, 112, 0, 0, 936, 939, 5, 151, 0, 0, 937, 939, 5, 138, 0, 0, 938, 935, 1, 0, 0, 0, 938, 936, 1, 0, 0, 0, 938, 937, 1, 0, 0, 0, 939, 940, 1, 0, 0, 0, 940, 1043, 3, 116, 58, 20, 941, 945, 10, 18, 0, 0, 942, 946, 5, 139, 0, 0, 943, 946, 5, 118, 0, 0, 944, 946, 5, 117, 0, 0, 945, 942, 1, 0, 0, 0, 945, 943, 1, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 947, 1, 0, 0, 0, 947, 1043, 3, 116, 58, 19, 948, 973, 10, 17, 0, 0, 949, 974, 5, 121, 0, 0, 950, 974, 5, 122, 0, 0, 951, 974, 5, 133, 0, 0, 952, 974, 5, 131, 0, 0, 953, 974, 5, 132, 0, 0, 954, 974, 5, 123, 0, 0, 955, 974, 5, 124, 0, 0, 956, 958, 5, 58, 0, 0, 957, 956, 1, 0, 0, 0, 957, 958, 1, 0, 0, 0, 958, 959, 1, 0, 0, 0, 959, 961, 5, 42, 0, 
0, 960, 962, 5, 15, 0, 0, 961, 960, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 974, 1, 0, 0, 0, 963, 965, 5, 58, 0, 0, 964, 963, 1, 0, 0, 0, 964, 965, 1, 0, 0, 0, 965, 966, 1, 0, 0, 0, 966, 974, 7, 10, 0, 0, 967, 974, 5, 145, 0, 0, 968, 974, 5, 146, 0, 0, 969, 974, 5, 135, 0, 0, 970, 974, 5, 126, 0, 0, 971, 974, 5, 127, 0, 0, 972, 974, 5, 134, 0, 0, 973, 949, 1, 0, 0, 0, 973, 950, 1, 0, 0, 0, 973, 951, 1, 0, 0, 0, 973, 952, 1, 0, 0, 0, 973, 953, 1, 0, 0, 0, 973, 954, 1, 0, 0, 0, 973, 955, 1, 0, 0, 0, 973, 957, 1, 0, 0, 0, 973, 964, 1, 0, 0, 0, 973, 967, 1, 0, 0, 0, 973, 968, 1, 0, 0, 0, 973, 969, 1, 0, 0, 0, 973, 970, 1, 0, 0, 0, 973, 971, 1, 0, 0, 0, 973, 972, 1, 0, 0, 0, 974, 975, 1, 0, 0, 0, 975, 1043, 3, 116, 58, 18, 976, 977, 10, 15, 0, 0, 977, 978, 5, 137, 0, 0, 978, 1043, 3, 116, 58, 16, 979, 980, 10, 13, 0, 0, 980, 981, 5, 2, 0, 0, 981, 1043, 3, 116, 58, 14, 982, 983, 10, 12, 0, 0, 983, 984, 5, 63, 0, 0, 984, 1043, 3, 116, 58, 13, 985, 987, 10, 11, 0, 0, 986, 988, 5, 58, 0, 0, 987, 986, 1, 0, 0, 0, 987, 988, 1, 0, 0, 0, 988, 989, 1, 0, 0, 0, 989, 990, 5, 9, 0, 0, 990, 991, 3, 116, 58, 0, 991, 992, 5, 2, 0, 0, 992, 993, 3, 116, 58, 12, 993, 1043, 1, 0, 0, 0, 994, 995, 10, 10, 0, 0, 995, 996, 5, 140, 0, 0, 996, 997, 3, 116, 58, 0, 997, 998, 5, 115, 0, 0, 998, 999, 3, 116, 58, 10, 999, 1043, 1, 0, 0, 0, 1000, 1001, 10, 30, 0, 0, 1001, 1003, 5, 130, 0, 0, 1002, 1004, 3, 114, 57, 0, 1003, 1002, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1043, 5, 149, 0, 0, 1006, 1007, 10, 26, 0, 0, 1007, 1008, 5, 129, 0, 0, 1008, 1009, 3, 116, 58, 0, 1009, 1010, 5, 148, 0, 0, 1010, 1043, 1, 0, 0, 0, 1011, 1012, 10, 25, 0, 0, 1012, 1013, 5, 120, 0, 0, 1013, 1043, 5, 108, 0, 0, 1014, 1015, 10, 24, 0, 0, 1015, 1016, 5, 120, 0, 0, 1016, 1043, 3, 156, 78, 0, 1017, 1018, 10, 23, 0, 0, 1018, 1019, 5, 136, 0, 0, 1019, 1020, 5, 129, 0, 0, 1020, 1021, 3, 116, 58, 0, 1021, 1022, 5, 148, 0, 0, 1022, 1043, 1, 0, 0, 0, 1023, 1024, 10, 22, 0, 0, 1024, 1025, 5, 136, 0, 0, 1025, 1043, 5, 108, 0, 0, 1026, 1027, 10, 21, 0, 0, 1027, 1028, 5, 136, 0, 0, 1028, 1043, 3, 156, 78, 0, 1029, 1030, 10, 16, 0, 0, 1030, 1032, 5, 46, 0, 0, 1031, 1033, 5, 58, 0, 0, 1032, 1031, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1043, 5, 59, 0, 0, 1035, 1040, 10, 9, 0, 0, 1036, 1037, 5, 6, 0, 0, 1037, 1041, 3, 156, 78, 0, 1038, 1039, 5, 6, 0, 0, 1039, 1041, 5, 110, 0, 0, 1040, 1036, 1, 0, 0, 0, 1040, 1038, 1, 0, 0, 0, 1041, 1043, 1, 0, 0, 0, 1042, 934, 1, 0, 0, 0, 1042, 941, 1, 0, 0, 0, 1042, 948, 1, 0, 0, 0, 1042, 976, 1, 0, 0, 0, 1042, 979, 1, 0, 0, 0, 1042, 982, 1, 0, 0, 0, 1042, 985, 1, 0, 0, 0, 1042, 994, 1, 0, 0, 0, 1042, 1000, 1, 0, 0, 0, 1042, 1006, 1, 0, 0, 0, 1042, 1011, 1, 0, 0, 0, 1042, 1014, 1, 0, 0, 0, 1042, 1017, 1, 0, 0, 0, 1042, 1023, 1, 0, 0, 0, 1042, 1026, 1, 0, 0, 0, 1042, 1029, 1, 0, 0, 0, 1042, 1035, 1, 0, 0, 0, 1043, 1046, 1, 0, 0, 0, 1044, 1042, 1, 0, 0, 0, 1044, 1045, 1, 0, 0, 0, 1045, 117, 1, 0, 0, 0, 1046, 1044, 1, 0, 0, 0, 1047, 1048, 5, 130, 0, 0, 1048, 1053, 3, 156, 78, 0, 1049, 1050, 5, 116, 0, 0, 1050, 1052, 3, 156, 78, 0, 1051, 1049, 1, 0, 0, 0, 1052, 1055, 1, 0, 0, 0, 1053, 1051, 1, 0, 0, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1057, 1, 0, 0, 0, 1055, 1053, 1, 0, 0, 0, 1056, 1058, 5, 116, 0, 0, 1057, 1056, 1, 0, 0, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1060, 5, 149, 0, 0, 1060, 1075, 1, 0, 0, 0, 1061, 1066, 3, 156, 78, 0, 1062, 1063, 5, 116, 0, 0, 1063, 1065, 3, 156, 78, 0, 1064, 1062, 1, 0, 0, 0, 1065, 1068, 1, 0, 0, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1067, 
1, 0, 0, 0, 1067, 1070, 1, 0, 0, 0, 1068, 1066, 1, 0, 0, 0, 1069, 1071, 5, 116, 0, 0, 1070, 1069, 1, 0, 0, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1075, 1, 0, 0, 0, 1072, 1073, 5, 130, 0, 0, 1073, 1075, 5, 149, 0, 0, 1074, 1047, 1, 0, 0, 0, 1074, 1061, 1, 0, 0, 0, 1074, 1072, 1, 0, 0, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1079, 5, 111, 0, 0, 1077, 1080, 3, 116, 58, 0, 1078, 1080, 3, 36, 18, 0, 1079, 1077, 1, 0, 0, 0, 1079, 1078, 1, 0, 0, 0, 1080, 119, 1, 0, 0, 0, 1081, 1082, 5, 132, 0, 0, 1082, 1086, 3, 156, 78, 0, 1083, 1085, 3, 122, 61, 0, 1084, 1083, 1, 0, 0, 0, 1085, 1088, 1, 0, 0, 0, 1086, 1084, 1, 0, 0, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1089, 1, 0, 0, 0, 1088, 1086, 1, 0, 0, 0, 1089, 1090, 5, 151, 0, 0, 1090, 1091, 5, 124, 0, 0, 1091, 1114, 1, 0, 0, 0, 1092, 1093, 5, 132, 0, 0, 1093, 1097, 3, 156, 78, 0, 1094, 1096, 3, 122, 61, 0, 1095, 1094, 1, 0, 0, 0, 1096, 1099, 1, 0, 0, 0, 1097, 1095, 1, 0, 0, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1100, 1, 0, 0, 0, 1099, 1097, 1, 0, 0, 0, 1100, 1106, 5, 124, 0, 0, 1101, 1107, 3, 120, 60, 0, 1102, 1103, 5, 128, 0, 0, 1103, 1104, 3, 116, 58, 0, 1104, 1105, 5, 147, 0, 0, 1105, 1107, 1, 0, 0, 0, 1106, 1101, 1, 0, 0, 0, 1106, 1102, 1, 0, 0, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 5, 132, 0, 0, 1109, 1110, 5, 151, 0, 0, 1110, 1111, 3, 156, 78, 0, 1111, 1112, 5, 124, 0, 0, 1112, 1114, 1, 0, 0, 0, 1113, 1081, 1, 0, 0, 0, 1113, 1092, 1, 0, 0, 0, 1114, 121, 1, 0, 0, 0, 1115, 1116, 3, 156, 78, 0, 1116, 1117, 5, 122, 0, 0, 1117, 1118, 3, 162, 81, 0, 1118, 1127, 1, 0, 0, 0, 1119, 1120, 3, 156, 78, 0, 1120, 1121, 5, 122, 0, 0, 1121, 1122, 5, 128, 0, 0, 1122, 1123, 3, 116, 58, 0, 1123, 1124, 5, 147, 0, 0, 1124, 1127, 1, 0, 0, 0, 1125, 1127, 3, 156, 78, 0, 1126, 1115, 1, 0, 0, 0, 1126, 1119, 1, 0, 0, 0, 1126, 1125, 1, 0, 0, 0, 1127, 123, 1, 0, 0, 0, 1128, 1133, 3, 126, 63, 0, 1129, 1130, 5, 116, 0, 0, 1130, 1132, 3, 126, 63, 0, 1131, 1129, 1, 0, 0, 0, 1132, 1135, 1, 0, 0, 0, 1133, 1131, 1, 0, 0, 0, 1133, 1134, 1, 0, 0, 0, 1134, 1137, 1, 0, 0, 0, 1135, 1133, 1, 0, 0, 0, 1136, 1138, 5, 116, 0, 0, 1137, 1136, 1, 0, 0, 0, 1137, 1138, 1, 0, 0, 0, 1138, 125, 1, 0, 0, 0, 1139, 1140, 3, 156, 78, 0, 1140, 1141, 5, 6, 0, 0, 1141, 1142, 5, 130, 0, 0, 1142, 1143, 3, 44, 22, 0, 1143, 1144, 5, 149, 0, 0, 1144, 1150, 1, 0, 0, 0, 1145, 1146, 3, 116, 58, 0, 1146, 1147, 5, 6, 0, 0, 1147, 1148, 3, 156, 78, 0, 1148, 1150, 1, 0, 0, 0, 1149, 1139, 1, 0, 0, 0, 1149, 1145, 1, 0, 0, 0, 1150, 127, 1, 0, 0, 0, 1151, 1159, 3, 160, 80, 0, 1152, 1153, 3, 136, 68, 0, 1153, 1154, 5, 120, 0, 0, 1154, 1156, 1, 0, 0, 0, 1155, 1152, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 1, 0, 0, 0, 1157, 1159, 3, 130, 65, 0, 1158, 1151, 1, 0, 0, 0, 1158, 1155, 1, 0, 0, 0, 1159, 129, 1, 0, 0, 0, 1160, 1165, 3, 156, 78, 0, 1161, 1162, 5, 120, 0, 0, 1162, 1164, 3, 156, 78, 0, 1163, 1161, 1, 0, 0, 0, 1164, 1167, 1, 0, 0, 0, 1165, 1163, 1, 0, 0, 0, 1165, 1166, 1, 0, 0, 0, 1166, 131, 1, 0, 0, 0, 1167, 1165, 1, 0, 0, 0, 1168, 1169, 6, 66, -1, 0, 1169, 1178, 3, 136, 68, 0, 1170, 1178, 3, 134, 67, 0, 1171, 1172, 5, 130, 0, 0, 1172, 1173, 3, 44, 22, 0, 1173, 1174, 5, 149, 0, 0, 1174, 1178, 1, 0, 0, 0, 1175, 1178, 3, 120, 60, 0, 1176, 1178, 3, 160, 80, 0, 1177, 1168, 1, 0, 0, 0, 1177, 1170, 1, 0, 0, 0, 1177, 1171, 1, 0, 0, 0, 1177, 1175, 1, 0, 0, 0, 1177, 1176, 1, 0, 0, 0, 1178, 1187, 1, 0, 0, 0, 1179, 1183, 10, 3, 0, 0, 1180, 1184, 3, 154, 77, 0, 1181, 1182, 5, 6, 0, 0, 1182, 1184, 3, 156, 78, 0, 1183, 1180, 1, 0, 0, 0, 1183, 1181, 1, 0, 0, 0, 1184, 1186, 1, 0, 0, 0, 1185, 1179, 1, 0, 0, 0, 1186, 1189, 1, 0, 
0, 0, 1187, 1185, 1, 0, 0, 0, 1187, 1188, 1, 0, 0, 0, 1188, 133, 1, 0, 0, 0, 1189, 1187, 1, 0, 0, 0, 1190, 1191, 3, 156, 78, 0, 1191, 1193, 5, 130, 0, 0, 1192, 1194, 3, 138, 69, 0, 1193, 1192, 1, 0, 0, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 5, 149, 0, 0, 1196, 135, 1, 0, 0, 0, 1197, 1198, 3, 140, 70, 0, 1198, 1199, 5, 120, 0, 0, 1199, 1201, 1, 0, 0, 0, 1200, 1197, 1, 0, 0, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 3, 156, 78, 0, 1203, 137, 1, 0, 0, 0, 1204, 1209, 3, 116, 58, 0, 1205, 1206, 5, 116, 0, 0, 1206, 1208, 3, 116, 58, 0, 1207, 1205, 1, 0, 0, 0, 1208, 1211, 1, 0, 0, 0, 1209, 1207, 1, 0, 0, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1213, 1, 0, 0, 0, 1211, 1209, 1, 0, 0, 0, 1212, 1214, 5, 116, 0, 0, 1213, 1212, 1, 0, 0, 0, 1213, 1214, 1, 0, 0, 0, 1214, 139, 1, 0, 0, 0, 1215, 1216, 3, 156, 78, 0, 1216, 141, 1, 0, 0, 0, 1217, 1226, 5, 106, 0, 0, 1218, 1219, 5, 120, 0, 0, 1219, 1226, 7, 11, 0, 0, 1220, 1221, 5, 108, 0, 0, 1221, 1223, 5, 120, 0, 0, 1222, 1224, 7, 11, 0, 0, 1223, 1222, 1, 0, 0, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1226, 1, 0, 0, 0, 1225, 1217, 1, 0, 0, 0, 1225, 1218, 1, 0, 0, 0, 1225, 1220, 1, 0, 0, 0, 1226, 143, 1, 0, 0, 0, 1227, 1229, 7, 12, 0, 0, 1228, 1227, 1, 0, 0, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1236, 1, 0, 0, 0, 1230, 1237, 3, 142, 71, 0, 1231, 1237, 5, 107, 0, 0, 1232, 1237, 5, 108, 0, 0, 1233, 1237, 5, 109, 0, 0, 1234, 1237, 5, 43, 0, 0, 1235, 1237, 5, 57, 0, 0, 1236, 1230, 1, 0, 0, 0, 1236, 1231, 1, 0, 0, 0, 1236, 1232, 1, 0, 0, 0, 1236, 1233, 1, 0, 0, 0, 1236, 1234, 1, 0, 0, 0, 1236, 1235, 1, 0, 0, 0, 1237, 145, 1, 0, 0, 0, 1238, 1242, 3, 144, 72, 0, 1239, 1242, 5, 110, 0, 0, 1240, 1242, 5, 59, 0, 0, 1241, 1238, 1, 0, 0, 0, 1241, 1239, 1, 0, 0, 0, 1241, 1240, 1, 0, 0, 0, 1242, 147, 1, 0, 0, 0, 1243, 1244, 7, 13, 0, 0, 1244, 149, 1, 0, 0, 0, 1245, 1246, 7, 14, 0, 0, 1246, 151, 1, 0, 0, 0, 1247, 1248, 7, 15, 0, 0, 1248, 153, 1, 0, 0, 0, 1249, 1252, 5, 105, 0, 0, 1250, 1252, 3, 152, 76, 0, 1251, 1249, 1, 0, 0, 0, 1251, 1250, 1, 0, 0, 0, 1252, 155, 1, 0, 0, 0, 1253, 1257, 5, 105, 0, 0, 1254, 1257, 3, 148, 74, 0, 1255, 1257, 3, 150, 75, 0, 1256, 1253, 1, 0, 0, 0, 1256, 1254, 1, 0, 0, 0, 1256, 1255, 1, 0, 0, 0, 1257, 157, 1, 0, 0, 0, 1258, 1259, 3, 162, 81, 0, 1259, 1260, 5, 122, 0, 0, 1260, 1261, 3, 144, 72, 0, 1261, 159, 1, 0, 0, 0, 1262, 1263, 5, 128, 0, 0, 1263, 1264, 3, 130, 65, 0, 1264, 1265, 5, 147, 0, 0, 1265, 161, 1, 0, 0, 0, 1266, 1269, 5, 110, 0, 0, 1267, 1269, 3, 164, 82, 0, 1268, 1266, 1, 0, 0, 0, 1268, 1267, 1, 0, 0, 0, 1269, 163, 1, 0, 0, 0, 1270, 1274, 5, 142, 0, 0, 1271, 1273, 3, 166, 83, 0, 1272, 1271, 1, 0, 0, 0, 1273, 1276, 1, 0, 0, 0, 1274, 1272, 1, 0, 0, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1277, 1, 0, 0, 0, 1276, 1274, 1, 0, 0, 0, 1277, 1278, 5, 144, 0, 0, 1278, 165, 1, 0, 0, 0, 1279, 1280, 5, 157, 0, 0, 1280, 1281, 3, 116, 58, 0, 1281, 1282, 5, 147, 0, 0, 1282, 1285, 1, 0, 0, 0, 1283, 1285, 5, 156, 0, 0, 1284, 1279, 1, 0, 0, 0, 1284, 1283, 1, 0, 0, 0, 1285, 167, 1, 0, 0, 0, 1286, 1290, 5, 143, 0, 0, 1287, 1289, 3, 170, 85, 0, 1288, 1287, 1, 0, 0, 0, 1289, 1292, 1, 0, 0, 0, 1290, 1288, 1, 0, 0, 0, 1290, 1291, 1, 0, 0, 0, 1291, 1293, 1, 0, 0, 0, 1292, 1290, 1, 0, 0, 0, 1293, 1294, 5, 0, 0, 1, 1294, 169, 1, 0, 0, 0, 1295, 1296, 5, 159, 0, 0, 1296, 1297, 3, 116, 58, 0, 1297, 1298, 5, 147, 0, 0, 1298, 1301, 1, 0, 0, 0, 1299, 1301, 5, 158, 0, 0, 1300, 1295, 1, 0, 0, 0, 1300, 1299, 1, 0, 0, 0, 1301, 171, 1, 0, 0, 0, 167, 175, 182, 191, 198, 202, 216, 220, 223, 227, 230, 237, 241, 250, 255, 264, 272, 279, 283, 289, 294, 
302, 309, 315, 327, 335, 349, 353, 358, 368, 377, 380, 384, 387, 391, 394, 397, 400, 403, 407, 411, 414, 417, 420, 424, 427, 436, 442, 463, 480, 497, 503, 509, 520, 522, 533, 536, 542, 550, 556, 558, 562, 567, 570, 573, 577, 581, 584, 586, 589, 593, 597, 600, 602, 604, 609, 620, 626, 633, 638, 642, 646, 652, 654, 661, 669, 672, 675, 694, 708, 724, 728, 739, 743, 754, 758, 765, 769, 776, 780, 785, 794, 798, 822, 839, 845, 848, 851, 861, 867, 870, 873, 881, 884, 888, 891, 905, 922, 927, 932, 938, 945, 957, 961, 964, 973, 987, 1003, 1032, 1040, 1042, 1044, 1053, 1057, 1066, 1070, 1074, 1079, 1086, 1097, 1106, 1113, 1126, 1133, 1137, 1149, 1155, 1158, 1165, 1177, 1183, 1187, 1193, 1200, 1209, 1213, 1223, 1225, 1228, 1236, 1241, 1251, 1256, 1268, 1274, 1284, 1290, 1300] \ No newline at end of file diff --git a/posthog/hogql/grammar/HogQLParser.py b/posthog/hogql/grammar/HogQLParser.py index f3a1844f404d3..e983a673610fb 100644 --- a/posthog/hogql/grammar/HogQLParser.py +++ b/posthog/hogql/grammar/HogQLParser.py @@ -1,4 +1,4 @@ -# Generated from HogQLParser.g4 by ANTLR 4.13.1 +# Generated from HogQLParser.g4 by ANTLR 4.13.2 # encoding: utf-8 from antlr4 import * from io import StringIO @@ -10,7 +10,7 @@ def serializedATN(): return [ - 4,1,159,1311,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6, + 4,1,159,1303,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6, 7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7, 13,2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2, 20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7, @@ -23,520 +23,519 @@ def serializedATN(): 65,2,66,7,66,2,67,7,67,2,68,7,68,2,69,7,69,2,70,7,70,2,71,7,71,2, 72,7,72,2,73,7,73,2,74,7,74,2,75,7,75,2,76,7,76,2,77,7,77,2,78,7, 78,2,79,7,79,2,80,7,80,2,81,7,81,2,82,7,82,2,83,7,83,2,84,7,84,2, - 85,7,85,2,86,7,86,2,87,7,87,1,0,5,0,178,8,0,10,0,12,0,181,9,0,1, - 0,1,0,1,1,1,1,3,1,187,8,1,1,2,1,2,1,3,1,3,1,3,1,3,1,3,3,3,196,8, - 3,1,4,1,4,1,4,5,4,201,8,4,10,4,12,4,204,9,4,1,4,3,4,207,8,4,1,5, - 1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,1,5,3,5,221,8,5,1,6,1,6, - 3,6,225,8,6,1,6,3,6,228,8,6,1,7,1,7,3,7,232,8,7,1,7,3,7,235,8,7, - 1,8,1,8,1,8,1,8,1,8,3,8,242,8,8,1,8,1,8,3,8,246,8,8,1,8,1,8,1,9, - 1,9,1,9,5,9,253,8,9,10,9,12,9,256,9,9,1,9,1,9,3,9,260,8,9,1,10,1, - 10,1,10,1,10,1,10,1,10,1,10,3,10,269,8,10,1,11,1,11,1,11,1,11,1, - 11,1,11,3,11,277,8,11,1,12,1,12,1,12,1,12,1,12,3,12,284,8,12,1,12, - 1,12,3,12,288,8,12,1,12,1,12,1,12,1,12,3,12,294,8,12,1,12,1,12,1, - 12,3,12,299,8,12,1,13,1,13,1,13,1,13,1,13,1,13,3,13,307,8,13,1,13, - 1,13,1,13,1,13,1,13,3,13,314,8,13,1,14,1,14,1,14,1,14,3,14,320,8, - 14,1,14,1,14,1,14,1,15,1,15,1,15,1,15,1,15,1,16,1,16,3,16,332,8, - 16,1,17,1,17,1,18,1,18,5,18,338,8,18,10,18,12,18,341,9,18,1,18,1, - 18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,5,20,352,8,20,10,20,12,20, - 355,9,20,1,20,3,20,358,8,20,1,21,1,21,1,21,3,21,363,8,21,1,21,1, - 21,1,22,1,22,1,22,1,22,5,22,371,8,22,10,22,12,22,374,9,22,1,23,1, - 23,1,23,1,23,1,23,1,23,3,23,382,8,23,1,24,3,24,385,8,24,1,24,1,24, - 3,24,389,8,24,1,24,3,24,392,8,24,1,24,1,24,3,24,396,8,24,1,24,3, - 24,399,8,24,1,24,3,24,402,8,24,1,24,3,24,405,8,24,1,24,3,24,408, - 8,24,1,24,1,24,3,24,412,8,24,1,24,1,24,3,24,416,8,24,1,24,3,24,419, - 8,24,1,24,3,24,422,8,24,1,24,3,24,425,8,24,1,24,1,24,3,24,429,8, - 24,1,24,3,24,432,8,24,1,25,1,25,1,25,1,26,1,26,1,26,1,26,3,26,441, - 8,26,1,27,1,27,1,27,1,28,3,28,447,8,28,1,28,1,28,1,28,1,28,1,29, - 1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,5,29, - 
466,8,29,10,29,12,29,469,9,29,1,30,1,30,1,30,1,31,1,31,1,31,1,32, - 1,32,1,32,1,32,1,32,1,32,1,32,1,32,3,32,485,8,32,1,33,1,33,1,33, - 1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,3,36, - 502,8,36,1,36,1,36,1,36,1,36,3,36,508,8,36,1,36,1,36,1,36,1,36,3, - 36,514,8,36,1,36,1,36,1,36,1,36,1,36,1,36,1,36,1,36,1,36,3,36,525, - 8,36,3,36,527,8,36,1,37,1,37,1,37,1,38,1,38,1,38,1,39,1,39,1,39, - 3,39,538,8,39,1,39,3,39,541,8,39,1,39,1,39,1,39,1,39,3,39,547,8, - 39,1,39,1,39,1,39,1,39,1,39,1,39,3,39,555,8,39,1,39,1,39,1,39,1, - 39,5,39,561,8,39,10,39,12,39,564,9,39,1,40,3,40,567,8,40,1,40,1, - 40,1,40,3,40,572,8,40,1,40,3,40,575,8,40,1,40,3,40,578,8,40,1,40, - 1,40,3,40,582,8,40,1,40,1,40,3,40,586,8,40,1,40,3,40,589,8,40,3, - 40,591,8,40,1,40,3,40,594,8,40,1,40,1,40,3,40,598,8,40,1,40,1,40, - 3,40,602,8,40,1,40,3,40,605,8,40,3,40,607,8,40,3,40,609,8,40,1,41, - 1,41,1,41,3,41,614,8,41,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42, - 1,42,3,42,625,8,42,1,43,1,43,1,43,1,43,3,43,631,8,43,1,44,1,44,1, - 44,5,44,636,8,44,10,44,12,44,639,9,44,1,45,1,45,3,45,643,8,45,1, - 45,1,45,3,45,647,8,45,1,45,1,45,3,45,651,8,45,1,46,1,46,1,46,1,46, - 3,46,657,8,46,3,46,659,8,46,1,47,1,47,1,47,5,47,664,8,47,10,47,12, - 47,667,9,47,1,48,1,48,1,48,1,48,1,49,3,49,674,8,49,1,49,3,49,677, - 8,49,1,49,3,49,680,8,49,1,50,1,50,1,50,1,50,1,51,1,51,1,51,1,51, - 1,52,1,52,1,52,1,53,1,53,1,53,1,53,1,53,1,53,3,53,699,8,53,1,54, - 1,54,1,54,1,54,1,54,1,54,1,54,1,54,1,54,1,54,1,54,1,54,3,54,713, - 8,54,1,55,1,55,1,55,1,56,1,56,1,56,1,56,1,56,1,56,1,56,1,56,1,56, - 5,56,727,8,56,10,56,12,56,730,9,56,1,56,3,56,733,8,56,1,56,1,56, - 1,56,1,56,1,56,1,56,1,56,5,56,742,8,56,10,56,12,56,745,9,56,1,56, - 3,56,748,8,56,1,56,1,56,1,56,1,56,1,56,1,56,1,56,5,56,757,8,56,10, - 56,12,56,760,9,56,1,56,3,56,763,8,56,1,56,1,56,1,56,1,56,1,56,3, - 56,770,8,56,1,56,1,56,3,56,774,8,56,1,57,1,57,1,57,5,57,779,8,57, - 10,57,12,57,782,9,57,1,57,3,57,785,8,57,1,58,1,58,1,58,3,58,790, - 8,58,1,58,1,58,1,58,1,58,1,58,4,58,797,8,58,11,58,12,58,798,1,58, - 1,58,3,58,803,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, + 85,7,85,1,0,5,0,174,8,0,10,0,12,0,177,9,0,1,0,1,0,1,1,1,1,3,1,183, + 8,1,1,2,1,2,1,3,1,3,1,3,1,3,1,3,3,3,192,8,3,1,4,1,4,1,4,5,4,197, + 8,4,10,4,12,4,200,9,4,1,4,3,4,203,8,4,1,5,1,5,1,5,1,5,1,5,1,5,1, + 5,1,5,1,5,1,5,1,5,1,5,3,5,217,8,5,1,6,1,6,3,6,221,8,6,1,6,3,6,224, + 8,6,1,7,1,7,3,7,228,8,7,1,7,3,7,231,8,7,1,8,1,8,1,8,1,8,1,8,3,8, + 238,8,8,1,8,1,8,3,8,242,8,8,1,8,1,8,1,9,1,9,1,9,5,9,249,8,9,10,9, + 12,9,252,9,9,1,9,1,9,3,9,256,8,9,1,10,1,10,1,10,1,10,1,10,1,10,1, + 10,3,10,265,8,10,1,11,1,11,1,11,1,11,1,11,1,11,3,11,273,8,11,1,12, + 1,12,1,12,1,12,1,12,3,12,280,8,12,1,12,1,12,3,12,284,8,12,1,12,1, + 12,1,12,1,12,3,12,290,8,12,1,12,1,12,1,12,3,12,295,8,12,1,13,1,13, + 1,13,1,13,1,13,1,13,3,13,303,8,13,1,13,1,13,1,13,1,13,1,13,3,13, + 310,8,13,1,14,1,14,1,14,1,14,3,14,316,8,14,1,14,1,14,1,14,1,15,1, + 15,1,15,1,15,1,15,1,16,1,16,3,16,328,8,16,1,17,1,17,1,18,1,18,5, + 18,334,8,18,10,18,12,18,337,9,18,1,18,1,18,1,19,1,19,1,19,1,19,1, + 20,1,20,1,20,5,20,348,8,20,10,20,12,20,351,9,20,1,20,3,20,354,8, + 20,1,21,1,21,1,21,3,21,359,8,21,1,21,1,21,1,22,1,22,1,22,1,22,5, + 22,367,8,22,10,22,12,22,370,9,22,1,23,1,23,1,23,1,23,1,23,1,23,3, + 23,378,8,23,1,24,3,24,381,8,24,1,24,1,24,3,24,385,8,24,1,24,3,24, + 388,8,24,1,24,1,24,3,24,392,8,24,1,24,3,24,395,8,24,1,24,3,24,398, + 8,24,1,24,3,24,401,8,24,1,24,3,24,404,8,24,1,24,1,24,3,24,408,8, + 24,1,24,1,24,3,24,412,8,24,1,24,3,24,415,8,24,1,24,3,24,418,8,24, + 
1,24,3,24,421,8,24,1,24,1,24,3,24,425,8,24,1,24,3,24,428,8,24,1, + 25,1,25,1,25,1,26,1,26,1,26,1,26,3,26,437,8,26,1,27,1,27,1,27,1, + 28,3,28,443,8,28,1,28,1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1, + 29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,5,29,462,8,29,10,29,12,29, + 465,9,29,1,30,1,30,1,30,1,31,1,31,1,31,1,32,1,32,1,32,1,32,1,32, + 1,32,1,32,1,32,3,32,481,8,32,1,33,1,33,1,33,1,34,1,34,1,34,1,34, + 1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,3,36,498,8,36,1,36,1,36, + 1,36,1,36,3,36,504,8,36,1,36,1,36,1,36,1,36,3,36,510,8,36,1,36,1, + 36,1,36,1,36,1,36,1,36,1,36,1,36,1,36,3,36,521,8,36,3,36,523,8,36, + 1,37,1,37,1,37,1,38,1,38,1,38,1,39,1,39,1,39,3,39,534,8,39,1,39, + 3,39,537,8,39,1,39,1,39,1,39,1,39,3,39,543,8,39,1,39,1,39,1,39,1, + 39,1,39,1,39,3,39,551,8,39,1,39,1,39,1,39,1,39,5,39,557,8,39,10, + 39,12,39,560,9,39,1,40,3,40,563,8,40,1,40,1,40,1,40,3,40,568,8,40, + 1,40,3,40,571,8,40,1,40,3,40,574,8,40,1,40,1,40,3,40,578,8,40,1, + 40,1,40,3,40,582,8,40,1,40,3,40,585,8,40,3,40,587,8,40,1,40,3,40, + 590,8,40,1,40,1,40,3,40,594,8,40,1,40,1,40,3,40,598,8,40,1,40,3, + 40,601,8,40,3,40,603,8,40,3,40,605,8,40,1,41,1,41,1,41,3,41,610, + 8,41,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,3,42,621,8,42, + 1,43,1,43,1,43,1,43,3,43,627,8,43,1,44,1,44,1,44,5,44,632,8,44,10, + 44,12,44,635,9,44,1,45,1,45,3,45,639,8,45,1,45,1,45,3,45,643,8,45, + 1,45,1,45,3,45,647,8,45,1,46,1,46,1,46,1,46,3,46,653,8,46,3,46,655, + 8,46,1,47,1,47,1,47,5,47,660,8,47,10,47,12,47,663,9,47,1,48,1,48, + 1,48,1,48,1,49,3,49,670,8,49,1,49,3,49,673,8,49,1,49,3,49,676,8, + 49,1,50,1,50,1,50,1,50,1,51,1,51,1,51,1,51,1,52,1,52,1,52,1,53,1, + 53,1,53,1,53,1,53,1,53,3,53,695,8,53,1,54,1,54,1,54,1,54,1,54,1, + 54,1,54,1,54,1,54,1,54,1,54,1,54,3,54,709,8,54,1,55,1,55,1,55,1, + 56,1,56,1,56,1,56,1,56,1,56,1,56,1,56,1,56,5,56,723,8,56,10,56,12, + 56,726,9,56,1,56,3,56,729,8,56,1,56,1,56,1,56,1,56,1,56,1,56,1,56, + 5,56,738,8,56,10,56,12,56,741,9,56,1,56,3,56,744,8,56,1,56,1,56, + 1,56,1,56,1,56,1,56,1,56,5,56,753,8,56,10,56,12,56,756,9,56,1,56, + 3,56,759,8,56,1,56,1,56,1,56,1,56,1,56,3,56,766,8,56,1,56,1,56,3, + 56,770,8,56,1,57,1,57,1,57,5,57,775,8,57,10,57,12,57,778,9,57,1, + 57,3,57,781,8,57,1,58,1,58,1,58,3,58,786,8,58,1,58,1,58,1,58,1,58, + 1,58,4,58,793,8,58,11,58,12,58,794,1,58,1,58,3,58,799,8,58,1,58, 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, - 3,58,827,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, - 1,58,1,58,1,58,1,58,1,58,3,58,844,8,58,1,58,1,58,1,58,1,58,3,58, - 850,8,58,1,58,3,58,853,8,58,1,58,3,58,856,8,58,1,58,1,58,1,58,1, - 58,1,58,1,58,1,58,1,58,3,58,866,8,58,1,58,1,58,1,58,1,58,3,58,872, - 8,58,1,58,3,58,875,8,58,1,58,3,58,878,8,58,1,58,1,58,1,58,1,58,1, - 58,1,58,3,58,886,8,58,1,58,3,58,889,8,58,1,58,1,58,3,58,893,8,58, - 1,58,3,58,896,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, - 1,58,1,58,1,58,3,58,910,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, - 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,927,8,58,1,58,1,58, - 1,58,3,58,932,8,58,1,58,1,58,3,58,936,8,58,1,58,1,58,1,58,1,58,3, - 58,942,8,58,1,58,1,58,1,58,1,58,1,58,3,58,949,8,58,1,58,1,58,1,58, - 1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,961,8,58,1,58,1,58,3,58, - 965,8,58,1,58,3,58,968,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3, - 58,977,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1, - 58,1,58,3,58,991,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1, - 58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1, - 58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1, - 
58,1,58,1,58,3,58,1030,8,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,1038, - 8,58,5,58,1040,8,58,10,58,12,58,1043,9,58,1,59,1,59,1,59,5,59,1048, - 8,59,10,59,12,59,1051,9,59,1,59,3,59,1054,8,59,1,60,1,60,3,60,1058, - 8,60,1,61,1,61,1,61,1,61,5,61,1064,8,61,10,61,12,61,1067,9,61,1, - 61,3,61,1070,8,61,1,61,1,61,1,61,1,61,1,61,5,61,1077,8,61,10,61, - 12,61,1080,9,61,1,61,3,61,1083,8,61,3,61,1085,8,61,1,61,1,61,1,61, - 1,62,1,62,1,62,5,62,1093,8,62,10,62,12,62,1096,9,62,1,62,1,62,1, - 62,1,62,1,62,1,62,5,62,1104,8,62,10,62,12,62,1107,9,62,1,62,1,62, - 1,62,1,62,1,62,1,62,3,62,1115,8,62,1,62,1,62,1,62,1,62,1,62,3,62, - 1122,8,62,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63,1,63, - 3,63,1135,8,63,1,64,1,64,1,64,5,64,1140,8,64,10,64,12,64,1143,9, - 64,1,64,3,64,1146,8,64,1,65,1,65,1,65,1,65,1,65,1,65,1,65,1,65,1, - 65,1,65,3,65,1158,8,65,1,66,1,66,1,66,1,66,3,66,1164,8,66,1,66,3, - 66,1167,8,66,1,67,1,67,1,67,5,67,1172,8,67,10,67,12,67,1175,9,67, - 1,68,1,68,1,68,1,68,1,68,1,68,1,68,1,68,1,68,3,68,1186,8,68,1,68, - 1,68,1,68,1,68,3,68,1192,8,68,5,68,1194,8,68,10,68,12,68,1197,9, - 68,1,69,1,69,1,69,3,69,1202,8,69,1,69,1,69,1,70,1,70,1,70,3,70,1209, - 8,70,1,70,1,70,1,71,1,71,1,71,5,71,1216,8,71,10,71,12,71,1219,9, - 71,1,71,3,71,1222,8,71,1,72,1,72,1,73,1,73,1,73,1,73,1,73,1,73,3, - 73,1232,8,73,3,73,1234,8,73,1,74,3,74,1237,8,74,1,74,1,74,1,74,1, - 74,1,74,1,74,3,74,1245,8,74,1,75,1,75,1,75,3,75,1250,8,75,1,76,1, - 76,1,77,1,77,1,78,1,78,1,79,1,79,3,79,1260,8,79,1,80,1,80,1,80,3, - 80,1265,8,80,1,81,1,81,1,81,1,81,1,82,1,82,1,82,1,82,1,83,1,83,3, - 83,1277,8,83,1,84,1,84,5,84,1281,8,84,10,84,12,84,1284,9,84,1,84, - 1,84,1,85,1,85,1,85,1,85,1,85,3,85,1293,8,85,1,86,1,86,5,86,1297, - 8,86,10,86,12,86,1300,9,86,1,86,1,86,1,87,1,87,1,87,1,87,1,87,3, - 87,1309,8,87,1,87,0,3,78,116,136,88,0,2,4,6,8,10,12,14,16,18,20, - 22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64, - 66,68,70,72,74,76,78,80,82,84,86,88,90,92,94,96,98,100,102,104,106, - 108,110,112,114,116,118,120,122,124,126,128,130,132,134,136,138, - 140,142,144,146,148,150,152,154,156,158,160,162,164,166,168,170, - 172,174,0,16,2,0,18,18,74,74,2,0,44,44,51,51,3,0,1,1,4,4,8,8,4,0, - 1,1,3,4,8,8,80,80,2,0,51,51,73,73,2,0,1,1,4,4,2,0,7,7,22,23,2,0, - 30,30,49,49,2,0,71,71,76,76,3,0,10,10,50,50,90,90,2,0,41,41,53,53, - 1,0,107,108,2,0,118,118,139,139,7,0,21,21,38,38,55,56,70,70,78,78, - 97,97,103,103,16,0,1,13,15,20,22,28,30,30,32,37,39,42,44,51,53,54, - 58,58,60,69,71,77,79,83,85,92,94,96,98,99,101,102,4,0,20,20,30,30, - 39,39,48,48,1479,0,179,1,0,0,0,2,186,1,0,0,0,4,188,1,0,0,0,6,190, - 1,0,0,0,8,197,1,0,0,0,10,220,1,0,0,0,12,222,1,0,0,0,14,229,1,0,0, - 0,16,236,1,0,0,0,18,249,1,0,0,0,20,261,1,0,0,0,22,270,1,0,0,0,24, - 278,1,0,0,0,26,300,1,0,0,0,28,315,1,0,0,0,30,324,1,0,0,0,32,329, - 1,0,0,0,34,333,1,0,0,0,36,335,1,0,0,0,38,344,1,0,0,0,40,348,1,0, - 0,0,42,362,1,0,0,0,44,366,1,0,0,0,46,381,1,0,0,0,48,384,1,0,0,0, - 50,433,1,0,0,0,52,436,1,0,0,0,54,442,1,0,0,0,56,446,1,0,0,0,58,452, - 1,0,0,0,60,470,1,0,0,0,62,473,1,0,0,0,64,476,1,0,0,0,66,486,1,0, - 0,0,68,489,1,0,0,0,70,493,1,0,0,0,72,526,1,0,0,0,74,528,1,0,0,0, - 76,531,1,0,0,0,78,546,1,0,0,0,80,608,1,0,0,0,82,613,1,0,0,0,84,624, - 1,0,0,0,86,626,1,0,0,0,88,632,1,0,0,0,90,640,1,0,0,0,92,658,1,0, - 0,0,94,660,1,0,0,0,96,668,1,0,0,0,98,673,1,0,0,0,100,681,1,0,0,0, - 102,685,1,0,0,0,104,689,1,0,0,0,106,698,1,0,0,0,108,712,1,0,0,0, - 110,714,1,0,0,0,112,773,1,0,0,0,114,775,1,0,0,0,116,935,1,0,0,0, - 118,1044,1,0,0,0,120,1057,1,0,0,0,122,1084,1,0,0,0,124,1121,1,0, - 
0,0,126,1134,1,0,0,0,128,1136,1,0,0,0,130,1157,1,0,0,0,132,1166, - 1,0,0,0,134,1168,1,0,0,0,136,1185,1,0,0,0,138,1198,1,0,0,0,140,1208, - 1,0,0,0,142,1212,1,0,0,0,144,1223,1,0,0,0,146,1233,1,0,0,0,148,1236, - 1,0,0,0,150,1249,1,0,0,0,152,1251,1,0,0,0,154,1253,1,0,0,0,156,1255, - 1,0,0,0,158,1259,1,0,0,0,160,1264,1,0,0,0,162,1266,1,0,0,0,164,1270, - 1,0,0,0,166,1276,1,0,0,0,168,1278,1,0,0,0,170,1292,1,0,0,0,172,1294, - 1,0,0,0,174,1308,1,0,0,0,176,178,3,2,1,0,177,176,1,0,0,0,178,181, - 1,0,0,0,179,177,1,0,0,0,179,180,1,0,0,0,180,182,1,0,0,0,181,179, - 1,0,0,0,182,183,5,0,0,1,183,1,1,0,0,0,184,187,3,6,3,0,185,187,3, - 10,5,0,186,184,1,0,0,0,186,185,1,0,0,0,187,3,1,0,0,0,188,189,3,116, - 58,0,189,5,1,0,0,0,190,191,5,52,0,0,191,195,3,160,80,0,192,193,5, - 115,0,0,193,194,5,122,0,0,194,196,3,4,2,0,195,192,1,0,0,0,195,196, - 1,0,0,0,196,7,1,0,0,0,197,202,3,160,80,0,198,199,5,116,0,0,199,201, - 3,160,80,0,200,198,1,0,0,0,201,204,1,0,0,0,202,200,1,0,0,0,202,203, - 1,0,0,0,203,206,1,0,0,0,204,202,1,0,0,0,205,207,5,116,0,0,206,205, - 1,0,0,0,206,207,1,0,0,0,207,9,1,0,0,0,208,221,3,12,6,0,209,221,3, - 14,7,0,210,221,3,18,9,0,211,221,3,20,10,0,212,221,3,22,11,0,213, - 221,3,26,13,0,214,221,3,24,12,0,215,221,3,28,14,0,216,221,3,30,15, - 0,217,221,3,36,18,0,218,221,3,32,16,0,219,221,3,34,17,0,220,208, - 1,0,0,0,220,209,1,0,0,0,220,210,1,0,0,0,220,211,1,0,0,0,220,212, - 1,0,0,0,220,213,1,0,0,0,220,214,1,0,0,0,220,215,1,0,0,0,220,216, - 1,0,0,0,220,217,1,0,0,0,220,218,1,0,0,0,220,219,1,0,0,0,221,11,1, - 0,0,0,222,224,5,72,0,0,223,225,3,4,2,0,224,223,1,0,0,0,224,225,1, - 0,0,0,225,227,1,0,0,0,226,228,5,150,0,0,227,226,1,0,0,0,227,228, - 1,0,0,0,228,13,1,0,0,0,229,231,5,84,0,0,230,232,3,4,2,0,231,230, - 1,0,0,0,231,232,1,0,0,0,232,234,1,0,0,0,233,235,5,150,0,0,234,233, - 1,0,0,0,234,235,1,0,0,0,235,15,1,0,0,0,236,245,5,14,0,0,237,238, - 5,130,0,0,238,241,3,160,80,0,239,240,5,115,0,0,240,242,3,160,80, - 0,241,239,1,0,0,0,241,242,1,0,0,0,242,243,1,0,0,0,243,244,5,149, - 0,0,244,246,1,0,0,0,245,237,1,0,0,0,245,246,1,0,0,0,246,247,1,0, - 0,0,247,248,3,36,18,0,248,17,1,0,0,0,249,250,5,93,0,0,250,254,3, - 36,18,0,251,253,3,16,8,0,252,251,1,0,0,0,253,256,1,0,0,0,254,252, - 1,0,0,0,254,255,1,0,0,0,255,259,1,0,0,0,256,254,1,0,0,0,257,258, - 5,29,0,0,258,260,3,36,18,0,259,257,1,0,0,0,259,260,1,0,0,0,260,19, - 1,0,0,0,261,262,5,40,0,0,262,263,5,130,0,0,263,264,3,4,2,0,264,265, - 5,149,0,0,265,268,3,10,5,0,266,267,5,25,0,0,267,269,3,10,5,0,268, - 266,1,0,0,0,268,269,1,0,0,0,269,21,1,0,0,0,270,271,5,100,0,0,271, - 272,5,130,0,0,272,273,3,4,2,0,273,274,5,149,0,0,274,276,3,10,5,0, - 275,277,5,150,0,0,276,275,1,0,0,0,276,277,1,0,0,0,277,23,1,0,0,0, - 278,279,5,33,0,0,279,283,5,130,0,0,280,284,3,6,3,0,281,284,3,30, - 15,0,282,284,3,4,2,0,283,280,1,0,0,0,283,281,1,0,0,0,283,282,1,0, - 0,0,283,284,1,0,0,0,284,285,1,0,0,0,285,287,5,150,0,0,286,288,3, - 4,2,0,287,286,1,0,0,0,287,288,1,0,0,0,288,289,1,0,0,0,289,293,5, - 150,0,0,290,294,3,6,3,0,291,294,3,30,15,0,292,294,3,4,2,0,293,290, - 1,0,0,0,293,291,1,0,0,0,293,292,1,0,0,0,293,294,1,0,0,0,294,295, - 1,0,0,0,295,296,5,149,0,0,296,298,3,10,5,0,297,299,5,150,0,0,298, - 297,1,0,0,0,298,299,1,0,0,0,299,25,1,0,0,0,300,301,5,33,0,0,301, - 302,5,130,0,0,302,303,5,52,0,0,303,306,3,160,80,0,304,305,5,116, - 0,0,305,307,3,160,80,0,306,304,1,0,0,0,306,307,1,0,0,0,307,308,1, - 0,0,0,308,309,5,42,0,0,309,310,3,4,2,0,310,311,5,149,0,0,311,313, - 3,10,5,0,312,314,5,150,0,0,313,312,1,0,0,0,313,314,1,0,0,0,314,27, - 1,0,0,0,315,316,5,31,0,0,316,317,3,160,80,0,317,319,5,130,0,0,318, - 
320,3,8,4,0,319,318,1,0,0,0,319,320,1,0,0,0,320,321,1,0,0,0,321, - 322,5,149,0,0,322,323,3,36,18,0,323,29,1,0,0,0,324,325,3,4,2,0,325, - 326,5,115,0,0,326,327,5,122,0,0,327,328,3,4,2,0,328,31,1,0,0,0,329, - 331,3,4,2,0,330,332,5,150,0,0,331,330,1,0,0,0,331,332,1,0,0,0,332, - 33,1,0,0,0,333,334,5,150,0,0,334,35,1,0,0,0,335,339,5,128,0,0,336, - 338,3,2,1,0,337,336,1,0,0,0,338,341,1,0,0,0,339,337,1,0,0,0,339, - 340,1,0,0,0,340,342,1,0,0,0,341,339,1,0,0,0,342,343,5,147,0,0,343, - 37,1,0,0,0,344,345,3,4,2,0,345,346,5,115,0,0,346,347,3,4,2,0,347, - 39,1,0,0,0,348,353,3,38,19,0,349,350,5,116,0,0,350,352,3,38,19,0, - 351,349,1,0,0,0,352,355,1,0,0,0,353,351,1,0,0,0,353,354,1,0,0,0, - 354,357,1,0,0,0,355,353,1,0,0,0,356,358,5,116,0,0,357,356,1,0,0, - 0,357,358,1,0,0,0,358,41,1,0,0,0,359,363,3,44,22,0,360,363,3,48, - 24,0,361,363,3,124,62,0,362,359,1,0,0,0,362,360,1,0,0,0,362,361, - 1,0,0,0,363,364,1,0,0,0,364,365,5,0,0,1,365,43,1,0,0,0,366,372,3, - 46,23,0,367,368,5,95,0,0,368,369,5,1,0,0,369,371,3,46,23,0,370,367, - 1,0,0,0,371,374,1,0,0,0,372,370,1,0,0,0,372,373,1,0,0,0,373,45,1, - 0,0,0,374,372,1,0,0,0,375,382,3,48,24,0,376,377,5,130,0,0,377,378, - 3,44,22,0,378,379,5,149,0,0,379,382,1,0,0,0,380,382,3,164,82,0,381, - 375,1,0,0,0,381,376,1,0,0,0,381,380,1,0,0,0,382,47,1,0,0,0,383,385, - 3,50,25,0,384,383,1,0,0,0,384,385,1,0,0,0,385,386,1,0,0,0,386,388, - 5,79,0,0,387,389,5,24,0,0,388,387,1,0,0,0,388,389,1,0,0,0,389,391, - 1,0,0,0,390,392,3,52,26,0,391,390,1,0,0,0,391,392,1,0,0,0,392,393, - 1,0,0,0,393,395,3,114,57,0,394,396,3,54,27,0,395,394,1,0,0,0,395, - 396,1,0,0,0,396,398,1,0,0,0,397,399,3,56,28,0,398,397,1,0,0,0,398, - 399,1,0,0,0,399,401,1,0,0,0,400,402,3,60,30,0,401,400,1,0,0,0,401, - 402,1,0,0,0,402,404,1,0,0,0,403,405,3,62,31,0,404,403,1,0,0,0,404, - 405,1,0,0,0,405,407,1,0,0,0,406,408,3,64,32,0,407,406,1,0,0,0,407, - 408,1,0,0,0,408,411,1,0,0,0,409,410,5,102,0,0,410,412,7,0,0,0,411, - 409,1,0,0,0,411,412,1,0,0,0,412,415,1,0,0,0,413,414,5,102,0,0,414, - 416,5,89,0,0,415,413,1,0,0,0,415,416,1,0,0,0,416,418,1,0,0,0,417, - 419,3,66,33,0,418,417,1,0,0,0,418,419,1,0,0,0,419,421,1,0,0,0,420, - 422,3,58,29,0,421,420,1,0,0,0,421,422,1,0,0,0,422,424,1,0,0,0,423, - 425,3,68,34,0,424,423,1,0,0,0,424,425,1,0,0,0,425,428,1,0,0,0,426, - 429,3,72,36,0,427,429,3,74,37,0,428,426,1,0,0,0,428,427,1,0,0,0, - 428,429,1,0,0,0,429,431,1,0,0,0,430,432,3,76,38,0,431,430,1,0,0, - 0,431,432,1,0,0,0,432,49,1,0,0,0,433,434,5,102,0,0,434,435,3,128, - 64,0,435,51,1,0,0,0,436,437,5,88,0,0,437,440,5,108,0,0,438,439,5, - 102,0,0,439,441,5,85,0,0,440,438,1,0,0,0,440,441,1,0,0,0,441,53, - 1,0,0,0,442,443,5,34,0,0,443,444,3,78,39,0,444,55,1,0,0,0,445,447, - 7,1,0,0,446,445,1,0,0,0,446,447,1,0,0,0,447,448,1,0,0,0,448,449, - 5,5,0,0,449,450,5,47,0,0,450,451,3,114,57,0,451,57,1,0,0,0,452,453, - 5,101,0,0,453,454,3,160,80,0,454,455,5,6,0,0,455,456,5,130,0,0,456, - 457,3,98,49,0,457,467,5,149,0,0,458,459,5,116,0,0,459,460,3,160, - 80,0,460,461,5,6,0,0,461,462,5,130,0,0,462,463,3,98,49,0,463,464, - 5,149,0,0,464,466,1,0,0,0,465,458,1,0,0,0,466,469,1,0,0,0,467,465, - 1,0,0,0,467,468,1,0,0,0,468,59,1,0,0,0,469,467,1,0,0,0,470,471,5, - 69,0,0,471,472,3,116,58,0,472,61,1,0,0,0,473,474,5,99,0,0,474,475, - 3,116,58,0,475,63,1,0,0,0,476,477,5,36,0,0,477,484,5,11,0,0,478, - 479,7,0,0,0,479,480,5,130,0,0,480,481,3,114,57,0,481,482,5,149,0, - 0,482,485,1,0,0,0,483,485,3,114,57,0,484,478,1,0,0,0,484,483,1,0, - 0,0,485,65,1,0,0,0,486,487,5,37,0,0,487,488,3,116,58,0,488,67,1, - 0,0,0,489,490,5,64,0,0,490,491,5,11,0,0,491,492,3,88,44,0,492,69, 
- 1,0,0,0,493,494,5,64,0,0,494,495,5,11,0,0,495,496,3,114,57,0,496, - 71,1,0,0,0,497,498,5,54,0,0,498,501,3,116,58,0,499,500,5,116,0,0, - 500,502,3,116,58,0,501,499,1,0,0,0,501,502,1,0,0,0,502,507,1,0,0, - 0,503,504,5,102,0,0,504,508,5,85,0,0,505,506,5,11,0,0,506,508,3, - 114,57,0,507,503,1,0,0,0,507,505,1,0,0,0,507,508,1,0,0,0,508,527, - 1,0,0,0,509,510,5,54,0,0,510,513,3,116,58,0,511,512,5,102,0,0,512, - 514,5,85,0,0,513,511,1,0,0,0,513,514,1,0,0,0,514,515,1,0,0,0,515, - 516,5,61,0,0,516,517,3,116,58,0,517,527,1,0,0,0,518,519,5,54,0,0, - 519,520,3,116,58,0,520,521,5,61,0,0,521,524,3,116,58,0,522,523,5, - 11,0,0,523,525,3,114,57,0,524,522,1,0,0,0,524,525,1,0,0,0,525,527, - 1,0,0,0,526,497,1,0,0,0,526,509,1,0,0,0,526,518,1,0,0,0,527,73,1, - 0,0,0,528,529,5,61,0,0,529,530,3,116,58,0,530,75,1,0,0,0,531,532, - 5,81,0,0,532,533,3,94,47,0,533,77,1,0,0,0,534,535,6,39,-1,0,535, - 537,3,136,68,0,536,538,5,28,0,0,537,536,1,0,0,0,537,538,1,0,0,0, - 538,540,1,0,0,0,539,541,3,86,43,0,540,539,1,0,0,0,540,541,1,0,0, - 0,541,547,1,0,0,0,542,543,5,130,0,0,543,544,3,78,39,0,544,545,5, - 149,0,0,545,547,1,0,0,0,546,534,1,0,0,0,546,542,1,0,0,0,547,562, - 1,0,0,0,548,549,10,3,0,0,549,550,3,82,41,0,550,551,3,78,39,4,551, - 561,1,0,0,0,552,554,10,4,0,0,553,555,3,80,40,0,554,553,1,0,0,0,554, - 555,1,0,0,0,555,556,1,0,0,0,556,557,5,47,0,0,557,558,3,78,39,0,558, - 559,3,84,42,0,559,561,1,0,0,0,560,548,1,0,0,0,560,552,1,0,0,0,561, - 564,1,0,0,0,562,560,1,0,0,0,562,563,1,0,0,0,563,79,1,0,0,0,564,562, - 1,0,0,0,565,567,7,2,0,0,566,565,1,0,0,0,566,567,1,0,0,0,567,568, - 1,0,0,0,568,575,5,44,0,0,569,571,5,44,0,0,570,572,7,2,0,0,571,570, - 1,0,0,0,571,572,1,0,0,0,572,575,1,0,0,0,573,575,7,2,0,0,574,566, - 1,0,0,0,574,569,1,0,0,0,574,573,1,0,0,0,575,609,1,0,0,0,576,578, - 7,3,0,0,577,576,1,0,0,0,577,578,1,0,0,0,578,579,1,0,0,0,579,581, - 7,4,0,0,580,582,5,65,0,0,581,580,1,0,0,0,581,582,1,0,0,0,582,591, - 1,0,0,0,583,585,7,4,0,0,584,586,5,65,0,0,585,584,1,0,0,0,585,586, - 1,0,0,0,586,588,1,0,0,0,587,589,7,3,0,0,588,587,1,0,0,0,588,589, - 1,0,0,0,589,591,1,0,0,0,590,577,1,0,0,0,590,583,1,0,0,0,591,609, - 1,0,0,0,592,594,7,5,0,0,593,592,1,0,0,0,593,594,1,0,0,0,594,595, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,823,8,58,1,58,1,58, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, + 3,58,840,8,58,1,58,1,58,1,58,1,58,3,58,846,8,58,1,58,3,58,849,8, + 58,1,58,3,58,852,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3, + 58,862,8,58,1,58,1,58,1,58,1,58,3,58,868,8,58,1,58,3,58,871,8,58, + 1,58,3,58,874,8,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,882,8,58,1, + 58,3,58,885,8,58,1,58,1,58,3,58,889,8,58,1,58,3,58,892,8,58,1,58, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,906, + 8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, + 1,58,1,58,1,58,3,58,923,8,58,1,58,1,58,1,58,3,58,928,8,58,1,58,1, + 58,1,58,3,58,933,8,58,1,58,1,58,1,58,1,58,3,58,939,8,58,1,58,1,58, + 1,58,1,58,1,58,3,58,946,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, + 1,58,1,58,1,58,3,58,958,8,58,1,58,1,58,3,58,962,8,58,1,58,3,58,965, + 8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,974,8,58,1,58,1,58, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,3,58,988,8,58, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, + 1,58,3,58,1004,8,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, + 1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58,1,58, + 1,58,1,58,1,58,1,58,1,58,3,58,1033,8,58,1,58,1,58,1,58,1,58,1,58, + 1,58,3,58,1041,8,58,5,58,1043,8,58,10,58,12,58,1046,9,58,1,59,1, + 
59,1,59,1,59,5,59,1052,8,59,10,59,12,59,1055,9,59,1,59,3,59,1058, + 8,59,1,59,1,59,1,59,1,59,1,59,5,59,1065,8,59,10,59,12,59,1068,9, + 59,1,59,3,59,1071,8,59,1,59,1,59,3,59,1075,8,59,1,59,1,59,1,59,3, + 59,1080,8,59,1,60,1,60,1,60,5,60,1085,8,60,10,60,12,60,1088,9,60, + 1,60,1,60,1,60,1,60,1,60,1,60,5,60,1096,8,60,10,60,12,60,1099,9, + 60,1,60,1,60,1,60,1,60,1,60,1,60,3,60,1107,8,60,1,60,1,60,1,60,1, + 60,1,60,3,60,1114,8,60,1,61,1,61,1,61,1,61,1,61,1,61,1,61,1,61,1, + 61,1,61,1,61,3,61,1127,8,61,1,62,1,62,1,62,5,62,1132,8,62,10,62, + 12,62,1135,9,62,1,62,3,62,1138,8,62,1,63,1,63,1,63,1,63,1,63,1,63, + 1,63,1,63,1,63,1,63,3,63,1150,8,63,1,64,1,64,1,64,1,64,3,64,1156, + 8,64,1,64,3,64,1159,8,64,1,65,1,65,1,65,5,65,1164,8,65,10,65,12, + 65,1167,9,65,1,66,1,66,1,66,1,66,1,66,1,66,1,66,1,66,1,66,3,66,1178, + 8,66,1,66,1,66,1,66,1,66,3,66,1184,8,66,5,66,1186,8,66,10,66,12, + 66,1189,9,66,1,67,1,67,1,67,3,67,1194,8,67,1,67,1,67,1,68,1,68,1, + 68,3,68,1201,8,68,1,68,1,68,1,69,1,69,1,69,5,69,1208,8,69,10,69, + 12,69,1211,9,69,1,69,3,69,1214,8,69,1,70,1,70,1,71,1,71,1,71,1,71, + 1,71,1,71,3,71,1224,8,71,3,71,1226,8,71,1,72,3,72,1229,8,72,1,72, + 1,72,1,72,1,72,1,72,1,72,3,72,1237,8,72,1,73,1,73,1,73,3,73,1242, + 8,73,1,74,1,74,1,75,1,75,1,76,1,76,1,77,1,77,3,77,1252,8,77,1,78, + 1,78,1,78,3,78,1257,8,78,1,79,1,79,1,79,1,79,1,80,1,80,1,80,1,80, + 1,81,1,81,3,81,1269,8,81,1,82,1,82,5,82,1273,8,82,10,82,12,82,1276, + 9,82,1,82,1,82,1,83,1,83,1,83,1,83,1,83,3,83,1285,8,83,1,84,1,84, + 5,84,1289,8,84,10,84,12,84,1292,9,84,1,84,1,84,1,85,1,85,1,85,1, + 85,1,85,3,85,1301,8,85,1,85,0,3,78,116,132,86,0,2,4,6,8,10,12,14, + 16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58, + 60,62,64,66,68,70,72,74,76,78,80,82,84,86,88,90,92,94,96,98,100, + 102,104,106,108,110,112,114,116,118,120,122,124,126,128,130,132, + 134,136,138,140,142,144,146,148,150,152,154,156,158,160,162,164, + 166,168,170,0,16,2,0,18,18,74,74,2,0,44,44,51,51,3,0,1,1,4,4,8,8, + 4,0,1,1,3,4,8,8,80,80,2,0,51,51,73,73,2,0,1,1,4,4,2,0,7,7,22,23, + 2,0,30,30,49,49,2,0,71,71,76,76,3,0,10,10,50,50,90,90,2,0,41,41, + 53,53,1,0,107,108,2,0,118,118,139,139,7,0,21,21,38,38,55,56,70,70, + 78,78,97,97,103,103,16,0,1,13,15,20,22,28,30,30,32,37,39,42,44,51, + 53,54,58,58,60,69,71,77,79,83,85,92,94,96,98,99,101,102,4,0,20,20, + 30,30,39,39,48,48,1475,0,175,1,0,0,0,2,182,1,0,0,0,4,184,1,0,0,0, + 6,186,1,0,0,0,8,193,1,0,0,0,10,216,1,0,0,0,12,218,1,0,0,0,14,225, + 1,0,0,0,16,232,1,0,0,0,18,245,1,0,0,0,20,257,1,0,0,0,22,266,1,0, + 0,0,24,274,1,0,0,0,26,296,1,0,0,0,28,311,1,0,0,0,30,320,1,0,0,0, + 32,325,1,0,0,0,34,329,1,0,0,0,36,331,1,0,0,0,38,340,1,0,0,0,40,344, + 1,0,0,0,42,358,1,0,0,0,44,362,1,0,0,0,46,377,1,0,0,0,48,380,1,0, + 0,0,50,429,1,0,0,0,52,432,1,0,0,0,54,438,1,0,0,0,56,442,1,0,0,0, + 58,448,1,0,0,0,60,466,1,0,0,0,62,469,1,0,0,0,64,472,1,0,0,0,66,482, + 1,0,0,0,68,485,1,0,0,0,70,489,1,0,0,0,72,522,1,0,0,0,74,524,1,0, + 0,0,76,527,1,0,0,0,78,542,1,0,0,0,80,604,1,0,0,0,82,609,1,0,0,0, + 84,620,1,0,0,0,86,622,1,0,0,0,88,628,1,0,0,0,90,636,1,0,0,0,92,654, + 1,0,0,0,94,656,1,0,0,0,96,664,1,0,0,0,98,669,1,0,0,0,100,677,1,0, + 0,0,102,681,1,0,0,0,104,685,1,0,0,0,106,694,1,0,0,0,108,708,1,0, + 0,0,110,710,1,0,0,0,112,769,1,0,0,0,114,771,1,0,0,0,116,932,1,0, + 0,0,118,1074,1,0,0,0,120,1113,1,0,0,0,122,1126,1,0,0,0,124,1128, + 1,0,0,0,126,1149,1,0,0,0,128,1158,1,0,0,0,130,1160,1,0,0,0,132,1177, + 1,0,0,0,134,1190,1,0,0,0,136,1200,1,0,0,0,138,1204,1,0,0,0,140,1215, + 1,0,0,0,142,1225,1,0,0,0,144,1228,1,0,0,0,146,1241,1,0,0,0,148,1243, + 
1,0,0,0,150,1245,1,0,0,0,152,1247,1,0,0,0,154,1251,1,0,0,0,156,1256, + 1,0,0,0,158,1258,1,0,0,0,160,1262,1,0,0,0,162,1268,1,0,0,0,164,1270, + 1,0,0,0,166,1284,1,0,0,0,168,1286,1,0,0,0,170,1300,1,0,0,0,172,174, + 3,2,1,0,173,172,1,0,0,0,174,177,1,0,0,0,175,173,1,0,0,0,175,176, + 1,0,0,0,176,178,1,0,0,0,177,175,1,0,0,0,178,179,5,0,0,1,179,1,1, + 0,0,0,180,183,3,6,3,0,181,183,3,10,5,0,182,180,1,0,0,0,182,181,1, + 0,0,0,183,3,1,0,0,0,184,185,3,116,58,0,185,5,1,0,0,0,186,187,5,52, + 0,0,187,191,3,156,78,0,188,189,5,115,0,0,189,190,5,122,0,0,190,192, + 3,4,2,0,191,188,1,0,0,0,191,192,1,0,0,0,192,7,1,0,0,0,193,198,3, + 156,78,0,194,195,5,116,0,0,195,197,3,156,78,0,196,194,1,0,0,0,197, + 200,1,0,0,0,198,196,1,0,0,0,198,199,1,0,0,0,199,202,1,0,0,0,200, + 198,1,0,0,0,201,203,5,116,0,0,202,201,1,0,0,0,202,203,1,0,0,0,203, + 9,1,0,0,0,204,217,3,12,6,0,205,217,3,14,7,0,206,217,3,18,9,0,207, + 217,3,20,10,0,208,217,3,22,11,0,209,217,3,26,13,0,210,217,3,24,12, + 0,211,217,3,28,14,0,212,217,3,30,15,0,213,217,3,36,18,0,214,217, + 3,32,16,0,215,217,3,34,17,0,216,204,1,0,0,0,216,205,1,0,0,0,216, + 206,1,0,0,0,216,207,1,0,0,0,216,208,1,0,0,0,216,209,1,0,0,0,216, + 210,1,0,0,0,216,211,1,0,0,0,216,212,1,0,0,0,216,213,1,0,0,0,216, + 214,1,0,0,0,216,215,1,0,0,0,217,11,1,0,0,0,218,220,5,72,0,0,219, + 221,3,4,2,0,220,219,1,0,0,0,220,221,1,0,0,0,221,223,1,0,0,0,222, + 224,5,150,0,0,223,222,1,0,0,0,223,224,1,0,0,0,224,13,1,0,0,0,225, + 227,5,84,0,0,226,228,3,4,2,0,227,226,1,0,0,0,227,228,1,0,0,0,228, + 230,1,0,0,0,229,231,5,150,0,0,230,229,1,0,0,0,230,231,1,0,0,0,231, + 15,1,0,0,0,232,241,5,14,0,0,233,234,5,130,0,0,234,237,3,156,78,0, + 235,236,5,115,0,0,236,238,3,156,78,0,237,235,1,0,0,0,237,238,1,0, + 0,0,238,239,1,0,0,0,239,240,5,149,0,0,240,242,1,0,0,0,241,233,1, + 0,0,0,241,242,1,0,0,0,242,243,1,0,0,0,243,244,3,36,18,0,244,17,1, + 0,0,0,245,246,5,93,0,0,246,250,3,36,18,0,247,249,3,16,8,0,248,247, + 1,0,0,0,249,252,1,0,0,0,250,248,1,0,0,0,250,251,1,0,0,0,251,255, + 1,0,0,0,252,250,1,0,0,0,253,254,5,29,0,0,254,256,3,36,18,0,255,253, + 1,0,0,0,255,256,1,0,0,0,256,19,1,0,0,0,257,258,5,40,0,0,258,259, + 5,130,0,0,259,260,3,4,2,0,260,261,5,149,0,0,261,264,3,10,5,0,262, + 263,5,25,0,0,263,265,3,10,5,0,264,262,1,0,0,0,264,265,1,0,0,0,265, + 21,1,0,0,0,266,267,5,100,0,0,267,268,5,130,0,0,268,269,3,4,2,0,269, + 270,5,149,0,0,270,272,3,10,5,0,271,273,5,150,0,0,272,271,1,0,0,0, + 272,273,1,0,0,0,273,23,1,0,0,0,274,275,5,33,0,0,275,279,5,130,0, + 0,276,280,3,6,3,0,277,280,3,30,15,0,278,280,3,4,2,0,279,276,1,0, + 0,0,279,277,1,0,0,0,279,278,1,0,0,0,279,280,1,0,0,0,280,281,1,0, + 0,0,281,283,5,150,0,0,282,284,3,4,2,0,283,282,1,0,0,0,283,284,1, + 0,0,0,284,285,1,0,0,0,285,289,5,150,0,0,286,290,3,6,3,0,287,290, + 3,30,15,0,288,290,3,4,2,0,289,286,1,0,0,0,289,287,1,0,0,0,289,288, + 1,0,0,0,289,290,1,0,0,0,290,291,1,0,0,0,291,292,5,149,0,0,292,294, + 3,10,5,0,293,295,5,150,0,0,294,293,1,0,0,0,294,295,1,0,0,0,295,25, + 1,0,0,0,296,297,5,33,0,0,297,298,5,130,0,0,298,299,5,52,0,0,299, + 302,3,156,78,0,300,301,5,116,0,0,301,303,3,156,78,0,302,300,1,0, + 0,0,302,303,1,0,0,0,303,304,1,0,0,0,304,305,5,42,0,0,305,306,3,4, + 2,0,306,307,5,149,0,0,307,309,3,10,5,0,308,310,5,150,0,0,309,308, + 1,0,0,0,309,310,1,0,0,0,310,27,1,0,0,0,311,312,5,31,0,0,312,313, + 3,156,78,0,313,315,5,130,0,0,314,316,3,8,4,0,315,314,1,0,0,0,315, + 316,1,0,0,0,316,317,1,0,0,0,317,318,5,149,0,0,318,319,3,36,18,0, + 319,29,1,0,0,0,320,321,3,4,2,0,321,322,5,115,0,0,322,323,5,122,0, + 0,323,324,3,4,2,0,324,31,1,0,0,0,325,327,3,4,2,0,326,328,5,150,0, + 
0,327,326,1,0,0,0,327,328,1,0,0,0,328,33,1,0,0,0,329,330,5,150,0, + 0,330,35,1,0,0,0,331,335,5,128,0,0,332,334,3,2,1,0,333,332,1,0,0, + 0,334,337,1,0,0,0,335,333,1,0,0,0,335,336,1,0,0,0,336,338,1,0,0, + 0,337,335,1,0,0,0,338,339,5,147,0,0,339,37,1,0,0,0,340,341,3,4,2, + 0,341,342,5,115,0,0,342,343,3,4,2,0,343,39,1,0,0,0,344,349,3,38, + 19,0,345,346,5,116,0,0,346,348,3,38,19,0,347,345,1,0,0,0,348,351, + 1,0,0,0,349,347,1,0,0,0,349,350,1,0,0,0,350,353,1,0,0,0,351,349, + 1,0,0,0,352,354,5,116,0,0,353,352,1,0,0,0,353,354,1,0,0,0,354,41, + 1,0,0,0,355,359,3,44,22,0,356,359,3,48,24,0,357,359,3,120,60,0,358, + 355,1,0,0,0,358,356,1,0,0,0,358,357,1,0,0,0,359,360,1,0,0,0,360, + 361,5,0,0,1,361,43,1,0,0,0,362,368,3,46,23,0,363,364,5,95,0,0,364, + 365,5,1,0,0,365,367,3,46,23,0,366,363,1,0,0,0,367,370,1,0,0,0,368, + 366,1,0,0,0,368,369,1,0,0,0,369,45,1,0,0,0,370,368,1,0,0,0,371,378, + 3,48,24,0,372,373,5,130,0,0,373,374,3,44,22,0,374,375,5,149,0,0, + 375,378,1,0,0,0,376,378,3,160,80,0,377,371,1,0,0,0,377,372,1,0,0, + 0,377,376,1,0,0,0,378,47,1,0,0,0,379,381,3,50,25,0,380,379,1,0,0, + 0,380,381,1,0,0,0,381,382,1,0,0,0,382,384,5,79,0,0,383,385,5,24, + 0,0,384,383,1,0,0,0,384,385,1,0,0,0,385,387,1,0,0,0,386,388,3,52, + 26,0,387,386,1,0,0,0,387,388,1,0,0,0,388,389,1,0,0,0,389,391,3,114, + 57,0,390,392,3,54,27,0,391,390,1,0,0,0,391,392,1,0,0,0,392,394,1, + 0,0,0,393,395,3,56,28,0,394,393,1,0,0,0,394,395,1,0,0,0,395,397, + 1,0,0,0,396,398,3,60,30,0,397,396,1,0,0,0,397,398,1,0,0,0,398,400, + 1,0,0,0,399,401,3,62,31,0,400,399,1,0,0,0,400,401,1,0,0,0,401,403, + 1,0,0,0,402,404,3,64,32,0,403,402,1,0,0,0,403,404,1,0,0,0,404,407, + 1,0,0,0,405,406,5,102,0,0,406,408,7,0,0,0,407,405,1,0,0,0,407,408, + 1,0,0,0,408,411,1,0,0,0,409,410,5,102,0,0,410,412,5,89,0,0,411,409, + 1,0,0,0,411,412,1,0,0,0,412,414,1,0,0,0,413,415,3,66,33,0,414,413, + 1,0,0,0,414,415,1,0,0,0,415,417,1,0,0,0,416,418,3,58,29,0,417,416, + 1,0,0,0,417,418,1,0,0,0,418,420,1,0,0,0,419,421,3,68,34,0,420,419, + 1,0,0,0,420,421,1,0,0,0,421,424,1,0,0,0,422,425,3,72,36,0,423,425, + 3,74,37,0,424,422,1,0,0,0,424,423,1,0,0,0,424,425,1,0,0,0,425,427, + 1,0,0,0,426,428,3,76,38,0,427,426,1,0,0,0,427,428,1,0,0,0,428,49, + 1,0,0,0,429,430,5,102,0,0,430,431,3,124,62,0,431,51,1,0,0,0,432, + 433,5,88,0,0,433,436,5,108,0,0,434,435,5,102,0,0,435,437,5,85,0, + 0,436,434,1,0,0,0,436,437,1,0,0,0,437,53,1,0,0,0,438,439,5,34,0, + 0,439,440,3,78,39,0,440,55,1,0,0,0,441,443,7,1,0,0,442,441,1,0,0, + 0,442,443,1,0,0,0,443,444,1,0,0,0,444,445,5,5,0,0,445,446,5,47,0, + 0,446,447,3,114,57,0,447,57,1,0,0,0,448,449,5,101,0,0,449,450,3, + 156,78,0,450,451,5,6,0,0,451,452,5,130,0,0,452,453,3,98,49,0,453, + 463,5,149,0,0,454,455,5,116,0,0,455,456,3,156,78,0,456,457,5,6,0, + 0,457,458,5,130,0,0,458,459,3,98,49,0,459,460,5,149,0,0,460,462, + 1,0,0,0,461,454,1,0,0,0,462,465,1,0,0,0,463,461,1,0,0,0,463,464, + 1,0,0,0,464,59,1,0,0,0,465,463,1,0,0,0,466,467,5,69,0,0,467,468, + 3,116,58,0,468,61,1,0,0,0,469,470,5,99,0,0,470,471,3,116,58,0,471, + 63,1,0,0,0,472,473,5,36,0,0,473,480,5,11,0,0,474,475,7,0,0,0,475, + 476,5,130,0,0,476,477,3,114,57,0,477,478,5,149,0,0,478,481,1,0,0, + 0,479,481,3,114,57,0,480,474,1,0,0,0,480,479,1,0,0,0,481,65,1,0, + 0,0,482,483,5,37,0,0,483,484,3,116,58,0,484,67,1,0,0,0,485,486,5, + 64,0,0,486,487,5,11,0,0,487,488,3,88,44,0,488,69,1,0,0,0,489,490, + 5,64,0,0,490,491,5,11,0,0,491,492,3,114,57,0,492,71,1,0,0,0,493, + 494,5,54,0,0,494,497,3,116,58,0,495,496,5,116,0,0,496,498,3,116, + 58,0,497,495,1,0,0,0,497,498,1,0,0,0,498,503,1,0,0,0,499,500,5,102, + 
0,0,500,504,5,85,0,0,501,502,5,11,0,0,502,504,3,114,57,0,503,499, + 1,0,0,0,503,501,1,0,0,0,503,504,1,0,0,0,504,523,1,0,0,0,505,506, + 5,54,0,0,506,509,3,116,58,0,507,508,5,102,0,0,508,510,5,85,0,0,509, + 507,1,0,0,0,509,510,1,0,0,0,510,511,1,0,0,0,511,512,5,61,0,0,512, + 513,3,116,58,0,513,523,1,0,0,0,514,515,5,54,0,0,515,516,3,116,58, + 0,516,517,5,61,0,0,517,520,3,116,58,0,518,519,5,11,0,0,519,521,3, + 114,57,0,520,518,1,0,0,0,520,521,1,0,0,0,521,523,1,0,0,0,522,493, + 1,0,0,0,522,505,1,0,0,0,522,514,1,0,0,0,523,73,1,0,0,0,524,525,5, + 61,0,0,525,526,3,116,58,0,526,75,1,0,0,0,527,528,5,81,0,0,528,529, + 3,94,47,0,529,77,1,0,0,0,530,531,6,39,-1,0,531,533,3,132,66,0,532, + 534,5,28,0,0,533,532,1,0,0,0,533,534,1,0,0,0,534,536,1,0,0,0,535, + 537,3,86,43,0,536,535,1,0,0,0,536,537,1,0,0,0,537,543,1,0,0,0,538, + 539,5,130,0,0,539,540,3,78,39,0,540,541,5,149,0,0,541,543,1,0,0, + 0,542,530,1,0,0,0,542,538,1,0,0,0,543,558,1,0,0,0,544,545,10,3,0, + 0,545,546,3,82,41,0,546,547,3,78,39,4,547,557,1,0,0,0,548,550,10, + 4,0,0,549,551,3,80,40,0,550,549,1,0,0,0,550,551,1,0,0,0,551,552, + 1,0,0,0,552,553,5,47,0,0,553,554,3,78,39,0,554,555,3,84,42,0,555, + 557,1,0,0,0,556,544,1,0,0,0,556,548,1,0,0,0,557,560,1,0,0,0,558, + 556,1,0,0,0,558,559,1,0,0,0,559,79,1,0,0,0,560,558,1,0,0,0,561,563, + 7,2,0,0,562,561,1,0,0,0,562,563,1,0,0,0,563,564,1,0,0,0,564,571, + 5,44,0,0,565,567,5,44,0,0,566,568,7,2,0,0,567,566,1,0,0,0,567,568, + 1,0,0,0,568,571,1,0,0,0,569,571,7,2,0,0,570,562,1,0,0,0,570,565, + 1,0,0,0,570,569,1,0,0,0,571,605,1,0,0,0,572,574,7,3,0,0,573,572, + 1,0,0,0,573,574,1,0,0,0,574,575,1,0,0,0,575,577,7,4,0,0,576,578, + 5,65,0,0,577,576,1,0,0,0,577,578,1,0,0,0,578,587,1,0,0,0,579,581, + 7,4,0,0,580,582,5,65,0,0,581,580,1,0,0,0,581,582,1,0,0,0,582,584, + 1,0,0,0,583,585,7,3,0,0,584,583,1,0,0,0,584,585,1,0,0,0,585,587, + 1,0,0,0,586,573,1,0,0,0,586,579,1,0,0,0,587,605,1,0,0,0,588,590, + 7,5,0,0,589,588,1,0,0,0,589,590,1,0,0,0,590,591,1,0,0,0,591,593, + 5,35,0,0,592,594,5,65,0,0,593,592,1,0,0,0,593,594,1,0,0,0,594,603, 1,0,0,0,595,597,5,35,0,0,596,598,5,65,0,0,597,596,1,0,0,0,597,598, - 1,0,0,0,598,607,1,0,0,0,599,601,5,35,0,0,600,602,5,65,0,0,601,600, - 1,0,0,0,601,602,1,0,0,0,602,604,1,0,0,0,603,605,7,5,0,0,604,603, - 1,0,0,0,604,605,1,0,0,0,605,607,1,0,0,0,606,593,1,0,0,0,606,599, - 1,0,0,0,607,609,1,0,0,0,608,574,1,0,0,0,608,590,1,0,0,0,608,606, - 1,0,0,0,609,81,1,0,0,0,610,611,5,17,0,0,611,614,5,47,0,0,612,614, - 5,116,0,0,613,610,1,0,0,0,613,612,1,0,0,0,614,83,1,0,0,0,615,616, - 5,62,0,0,616,625,3,114,57,0,617,618,5,96,0,0,618,619,5,130,0,0,619, - 620,3,114,57,0,620,621,5,149,0,0,621,625,1,0,0,0,622,623,5,96,0, - 0,623,625,3,114,57,0,624,615,1,0,0,0,624,617,1,0,0,0,624,622,1,0, - 0,0,625,85,1,0,0,0,626,627,5,77,0,0,627,630,3,92,46,0,628,629,5, - 61,0,0,629,631,3,92,46,0,630,628,1,0,0,0,630,631,1,0,0,0,631,87, - 1,0,0,0,632,637,3,90,45,0,633,634,5,116,0,0,634,636,3,90,45,0,635, - 633,1,0,0,0,636,639,1,0,0,0,637,635,1,0,0,0,637,638,1,0,0,0,638, - 89,1,0,0,0,639,637,1,0,0,0,640,642,3,116,58,0,641,643,7,6,0,0,642, - 641,1,0,0,0,642,643,1,0,0,0,643,646,1,0,0,0,644,645,5,60,0,0,645, - 647,7,7,0,0,646,644,1,0,0,0,646,647,1,0,0,0,647,650,1,0,0,0,648, - 649,5,16,0,0,649,651,5,110,0,0,650,648,1,0,0,0,650,651,1,0,0,0,651, - 91,1,0,0,0,652,659,3,164,82,0,653,656,3,148,74,0,654,655,5,151,0, - 0,655,657,3,148,74,0,656,654,1,0,0,0,656,657,1,0,0,0,657,659,1,0, - 0,0,658,652,1,0,0,0,658,653,1,0,0,0,659,93,1,0,0,0,660,665,3,96, - 48,0,661,662,5,116,0,0,662,664,3,96,48,0,663,661,1,0,0,0,664,667, - 
1,0,0,0,665,663,1,0,0,0,665,666,1,0,0,0,666,95,1,0,0,0,667,665,1, - 0,0,0,668,669,3,160,80,0,669,670,5,122,0,0,670,671,3,150,75,0,671, - 97,1,0,0,0,672,674,3,100,50,0,673,672,1,0,0,0,673,674,1,0,0,0,674, - 676,1,0,0,0,675,677,3,102,51,0,676,675,1,0,0,0,676,677,1,0,0,0,677, - 679,1,0,0,0,678,680,3,104,52,0,679,678,1,0,0,0,679,680,1,0,0,0,680, - 99,1,0,0,0,681,682,5,67,0,0,682,683,5,11,0,0,683,684,3,114,57,0, - 684,101,1,0,0,0,685,686,5,64,0,0,686,687,5,11,0,0,687,688,3,88,44, - 0,688,103,1,0,0,0,689,690,7,8,0,0,690,691,3,106,53,0,691,105,1,0, - 0,0,692,699,3,108,54,0,693,694,5,9,0,0,694,695,3,108,54,0,695,696, - 5,2,0,0,696,697,3,108,54,0,697,699,1,0,0,0,698,692,1,0,0,0,698,693, - 1,0,0,0,699,107,1,0,0,0,700,701,5,19,0,0,701,713,5,75,0,0,702,703, - 5,94,0,0,703,713,5,68,0,0,704,705,5,94,0,0,705,713,5,32,0,0,706, - 707,3,148,74,0,707,708,5,68,0,0,708,713,1,0,0,0,709,710,3,148,74, - 0,710,711,5,32,0,0,711,713,1,0,0,0,712,700,1,0,0,0,712,702,1,0,0, - 0,712,704,1,0,0,0,712,706,1,0,0,0,712,709,1,0,0,0,713,109,1,0,0, - 0,714,715,3,116,58,0,715,716,5,0,0,1,716,111,1,0,0,0,717,774,3,160, - 80,0,718,719,3,160,80,0,719,720,5,130,0,0,720,721,3,160,80,0,721, - 728,3,112,56,0,722,723,5,116,0,0,723,724,3,160,80,0,724,725,3,112, - 56,0,725,727,1,0,0,0,726,722,1,0,0,0,727,730,1,0,0,0,728,726,1,0, - 0,0,728,729,1,0,0,0,729,732,1,0,0,0,730,728,1,0,0,0,731,733,5,116, - 0,0,732,731,1,0,0,0,732,733,1,0,0,0,733,734,1,0,0,0,734,735,5,149, - 0,0,735,774,1,0,0,0,736,737,3,160,80,0,737,738,5,130,0,0,738,743, - 3,162,81,0,739,740,5,116,0,0,740,742,3,162,81,0,741,739,1,0,0,0, - 742,745,1,0,0,0,743,741,1,0,0,0,743,744,1,0,0,0,744,747,1,0,0,0, - 745,743,1,0,0,0,746,748,5,116,0,0,747,746,1,0,0,0,747,748,1,0,0, - 0,748,749,1,0,0,0,749,750,5,149,0,0,750,774,1,0,0,0,751,752,3,160, - 80,0,752,753,5,130,0,0,753,758,3,112,56,0,754,755,5,116,0,0,755, - 757,3,112,56,0,756,754,1,0,0,0,757,760,1,0,0,0,758,756,1,0,0,0,758, - 759,1,0,0,0,759,762,1,0,0,0,760,758,1,0,0,0,761,763,5,116,0,0,762, - 761,1,0,0,0,762,763,1,0,0,0,763,764,1,0,0,0,764,765,5,149,0,0,765, - 774,1,0,0,0,766,767,3,160,80,0,767,769,5,130,0,0,768,770,3,114,57, - 0,769,768,1,0,0,0,769,770,1,0,0,0,770,771,1,0,0,0,771,772,5,149, - 0,0,772,774,1,0,0,0,773,717,1,0,0,0,773,718,1,0,0,0,773,736,1,0, - 0,0,773,751,1,0,0,0,773,766,1,0,0,0,774,113,1,0,0,0,775,780,3,116, - 58,0,776,777,5,116,0,0,777,779,3,116,58,0,778,776,1,0,0,0,779,782, - 1,0,0,0,780,778,1,0,0,0,780,781,1,0,0,0,781,784,1,0,0,0,782,780, - 1,0,0,0,783,785,5,116,0,0,784,783,1,0,0,0,784,785,1,0,0,0,785,115, - 1,0,0,0,786,787,6,58,-1,0,787,789,5,12,0,0,788,790,3,116,58,0,789, - 788,1,0,0,0,789,790,1,0,0,0,790,796,1,0,0,0,791,792,5,98,0,0,792, - 793,3,116,58,0,793,794,5,83,0,0,794,795,3,116,58,0,795,797,1,0,0, - 0,796,791,1,0,0,0,797,798,1,0,0,0,798,796,1,0,0,0,798,799,1,0,0, - 0,799,802,1,0,0,0,800,801,5,25,0,0,801,803,3,116,58,0,802,800,1, - 0,0,0,802,803,1,0,0,0,803,804,1,0,0,0,804,805,5,26,0,0,805,936,1, - 0,0,0,806,807,5,13,0,0,807,808,5,130,0,0,808,809,3,116,58,0,809, - 810,5,6,0,0,810,811,3,112,56,0,811,812,5,149,0,0,812,936,1,0,0,0, - 813,814,5,20,0,0,814,936,5,110,0,0,815,816,5,45,0,0,816,817,3,116, - 58,0,817,818,3,152,76,0,818,936,1,0,0,0,819,820,5,82,0,0,820,821, - 5,130,0,0,821,822,3,116,58,0,822,823,5,34,0,0,823,826,3,116,58,0, - 824,825,5,33,0,0,825,827,3,116,58,0,826,824,1,0,0,0,826,827,1,0, - 0,0,827,828,1,0,0,0,828,829,5,149,0,0,829,936,1,0,0,0,830,831,5, - 86,0,0,831,936,5,110,0,0,832,833,5,91,0,0,833,834,5,130,0,0,834, - 835,7,9,0,0,835,836,3,166,83,0,836,837,5,34,0,0,837,838,3,116,58, - 
0,838,839,5,149,0,0,839,936,1,0,0,0,840,841,3,160,80,0,841,843,5, - 130,0,0,842,844,3,114,57,0,843,842,1,0,0,0,843,844,1,0,0,0,844,845, - 1,0,0,0,845,846,5,149,0,0,846,855,1,0,0,0,847,849,5,130,0,0,848, - 850,5,24,0,0,849,848,1,0,0,0,849,850,1,0,0,0,850,852,1,0,0,0,851, - 853,3,118,59,0,852,851,1,0,0,0,852,853,1,0,0,0,853,854,1,0,0,0,854, - 856,5,149,0,0,855,847,1,0,0,0,855,856,1,0,0,0,856,857,1,0,0,0,857, - 858,5,66,0,0,858,859,5,130,0,0,859,860,3,98,49,0,860,861,5,149,0, - 0,861,936,1,0,0,0,862,863,3,160,80,0,863,865,5,130,0,0,864,866,3, - 114,57,0,865,864,1,0,0,0,865,866,1,0,0,0,866,867,1,0,0,0,867,868, - 5,149,0,0,868,877,1,0,0,0,869,871,5,130,0,0,870,872,5,24,0,0,871, - 870,1,0,0,0,871,872,1,0,0,0,872,874,1,0,0,0,873,875,3,118,59,0,874, - 873,1,0,0,0,874,875,1,0,0,0,875,876,1,0,0,0,876,878,5,149,0,0,877, - 869,1,0,0,0,877,878,1,0,0,0,878,879,1,0,0,0,879,880,5,66,0,0,880, - 881,3,160,80,0,881,936,1,0,0,0,882,888,3,160,80,0,883,885,5,130, - 0,0,884,886,3,114,57,0,885,884,1,0,0,0,885,886,1,0,0,0,886,887,1, - 0,0,0,887,889,5,149,0,0,888,883,1,0,0,0,888,889,1,0,0,0,889,890, - 1,0,0,0,890,892,5,130,0,0,891,893,5,24,0,0,892,891,1,0,0,0,892,893, - 1,0,0,0,893,895,1,0,0,0,894,896,3,118,59,0,895,894,1,0,0,0,895,896, - 1,0,0,0,896,897,1,0,0,0,897,898,5,149,0,0,898,936,1,0,0,0,899,936, - 3,124,62,0,900,936,3,168,84,0,901,936,3,150,75,0,902,903,5,118,0, - 0,903,936,3,116,58,19,904,905,5,58,0,0,905,936,3,116,58,13,906,907, - 3,140,70,0,907,908,5,120,0,0,908,910,1,0,0,0,909,906,1,0,0,0,909, - 910,1,0,0,0,910,911,1,0,0,0,911,936,5,112,0,0,912,913,5,130,0,0, - 913,914,3,44,22,0,914,915,5,149,0,0,915,936,1,0,0,0,916,917,5,130, - 0,0,917,918,3,116,58,0,918,919,5,149,0,0,919,936,1,0,0,0,920,921, - 5,130,0,0,921,922,3,114,57,0,922,923,5,149,0,0,923,936,1,0,0,0,924, - 926,5,129,0,0,925,927,3,114,57,0,926,925,1,0,0,0,926,927,1,0,0,0, - 927,928,1,0,0,0,928,936,5,148,0,0,929,931,5,128,0,0,930,932,3,40, - 20,0,931,930,1,0,0,0,931,932,1,0,0,0,932,933,1,0,0,0,933,936,5,147, - 0,0,934,936,3,132,66,0,935,786,1,0,0,0,935,806,1,0,0,0,935,813,1, - 0,0,0,935,815,1,0,0,0,935,819,1,0,0,0,935,830,1,0,0,0,935,832,1, - 0,0,0,935,840,1,0,0,0,935,862,1,0,0,0,935,882,1,0,0,0,935,899,1, - 0,0,0,935,900,1,0,0,0,935,901,1,0,0,0,935,902,1,0,0,0,935,904,1, - 0,0,0,935,909,1,0,0,0,935,912,1,0,0,0,935,916,1,0,0,0,935,920,1, - 0,0,0,935,924,1,0,0,0,935,929,1,0,0,0,935,934,1,0,0,0,936,1041,1, - 0,0,0,937,941,10,18,0,0,938,942,5,112,0,0,939,942,5,151,0,0,940, - 942,5,138,0,0,941,938,1,0,0,0,941,939,1,0,0,0,941,940,1,0,0,0,942, - 943,1,0,0,0,943,1040,3,116,58,19,944,948,10,17,0,0,945,949,5,139, - 0,0,946,949,5,118,0,0,947,949,5,117,0,0,948,945,1,0,0,0,948,946, - 1,0,0,0,948,947,1,0,0,0,949,950,1,0,0,0,950,1040,3,116,58,18,951, - 976,10,16,0,0,952,977,5,121,0,0,953,977,5,122,0,0,954,977,5,133, - 0,0,955,977,5,131,0,0,956,977,5,132,0,0,957,977,5,123,0,0,958,977, - 5,124,0,0,959,961,5,58,0,0,960,959,1,0,0,0,960,961,1,0,0,0,961,962, - 1,0,0,0,962,964,5,42,0,0,963,965,5,15,0,0,964,963,1,0,0,0,964,965, - 1,0,0,0,965,977,1,0,0,0,966,968,5,58,0,0,967,966,1,0,0,0,967,968, - 1,0,0,0,968,969,1,0,0,0,969,977,7,10,0,0,970,977,5,145,0,0,971,977, - 5,146,0,0,972,977,5,135,0,0,973,977,5,126,0,0,974,977,5,127,0,0, - 975,977,5,134,0,0,976,952,1,0,0,0,976,953,1,0,0,0,976,954,1,0,0, - 0,976,955,1,0,0,0,976,956,1,0,0,0,976,957,1,0,0,0,976,958,1,0,0, - 0,976,960,1,0,0,0,976,967,1,0,0,0,976,970,1,0,0,0,976,971,1,0,0, - 0,976,972,1,0,0,0,976,973,1,0,0,0,976,974,1,0,0,0,976,975,1,0,0, - 0,977,978,1,0,0,0,978,1040,3,116,58,17,979,980,10,14,0,0,980,981, - 
5,137,0,0,981,1040,3,116,58,15,982,983,10,12,0,0,983,984,5,2,0,0, - 984,1040,3,116,58,13,985,986,10,11,0,0,986,987,5,63,0,0,987,1040, - 3,116,58,12,988,990,10,10,0,0,989,991,5,58,0,0,990,989,1,0,0,0,990, - 991,1,0,0,0,991,992,1,0,0,0,992,993,5,9,0,0,993,994,3,116,58,0,994, - 995,5,2,0,0,995,996,3,116,58,11,996,1040,1,0,0,0,997,998,10,9,0, - 0,998,999,5,140,0,0,999,1000,3,116,58,0,1000,1001,5,115,0,0,1001, - 1002,3,116,58,9,1002,1040,1,0,0,0,1003,1004,10,25,0,0,1004,1005, - 5,129,0,0,1005,1006,3,116,58,0,1006,1007,5,148,0,0,1007,1040,1,0, - 0,0,1008,1009,10,24,0,0,1009,1010,5,120,0,0,1010,1040,5,108,0,0, - 1011,1012,10,23,0,0,1012,1013,5,120,0,0,1013,1040,3,160,80,0,1014, - 1015,10,22,0,0,1015,1016,5,136,0,0,1016,1017,5,129,0,0,1017,1018, - 3,116,58,0,1018,1019,5,148,0,0,1019,1040,1,0,0,0,1020,1021,10,21, - 0,0,1021,1022,5,136,0,0,1022,1040,5,108,0,0,1023,1024,10,20,0,0, - 1024,1025,5,136,0,0,1025,1040,3,160,80,0,1026,1027,10,15,0,0,1027, - 1029,5,46,0,0,1028,1030,5,58,0,0,1029,1028,1,0,0,0,1029,1030,1,0, - 0,0,1030,1031,1,0,0,0,1031,1040,5,59,0,0,1032,1037,10,8,0,0,1033, - 1034,5,6,0,0,1034,1038,3,160,80,0,1035,1036,5,6,0,0,1036,1038,5, - 110,0,0,1037,1033,1,0,0,0,1037,1035,1,0,0,0,1038,1040,1,0,0,0,1039, - 937,1,0,0,0,1039,944,1,0,0,0,1039,951,1,0,0,0,1039,979,1,0,0,0,1039, - 982,1,0,0,0,1039,985,1,0,0,0,1039,988,1,0,0,0,1039,997,1,0,0,0,1039, - 1003,1,0,0,0,1039,1008,1,0,0,0,1039,1011,1,0,0,0,1039,1014,1,0,0, - 0,1039,1020,1,0,0,0,1039,1023,1,0,0,0,1039,1026,1,0,0,0,1039,1032, - 1,0,0,0,1040,1043,1,0,0,0,1041,1039,1,0,0,0,1041,1042,1,0,0,0,1042, - 117,1,0,0,0,1043,1041,1,0,0,0,1044,1049,3,120,60,0,1045,1046,5,116, - 0,0,1046,1048,3,120,60,0,1047,1045,1,0,0,0,1048,1051,1,0,0,0,1049, - 1047,1,0,0,0,1049,1050,1,0,0,0,1050,1053,1,0,0,0,1051,1049,1,0,0, - 0,1052,1054,5,116,0,0,1053,1052,1,0,0,0,1053,1054,1,0,0,0,1054,119, - 1,0,0,0,1055,1058,3,122,61,0,1056,1058,3,116,58,0,1057,1055,1,0, - 0,0,1057,1056,1,0,0,0,1058,121,1,0,0,0,1059,1060,5,130,0,0,1060, - 1065,3,160,80,0,1061,1062,5,116,0,0,1062,1064,3,160,80,0,1063,1061, - 1,0,0,0,1064,1067,1,0,0,0,1065,1063,1,0,0,0,1065,1066,1,0,0,0,1066, - 1069,1,0,0,0,1067,1065,1,0,0,0,1068,1070,5,116,0,0,1069,1068,1,0, - 0,0,1069,1070,1,0,0,0,1070,1071,1,0,0,0,1071,1072,5,149,0,0,1072, - 1085,1,0,0,0,1073,1078,3,160,80,0,1074,1075,5,116,0,0,1075,1077, - 3,160,80,0,1076,1074,1,0,0,0,1077,1080,1,0,0,0,1078,1076,1,0,0,0, - 1078,1079,1,0,0,0,1079,1082,1,0,0,0,1080,1078,1,0,0,0,1081,1083, - 5,116,0,0,1082,1081,1,0,0,0,1082,1083,1,0,0,0,1083,1085,1,0,0,0, - 1084,1059,1,0,0,0,1084,1073,1,0,0,0,1085,1086,1,0,0,0,1086,1087, - 5,111,0,0,1087,1088,3,116,58,0,1088,123,1,0,0,0,1089,1090,5,132, - 0,0,1090,1094,3,160,80,0,1091,1093,3,126,63,0,1092,1091,1,0,0,0, - 1093,1096,1,0,0,0,1094,1092,1,0,0,0,1094,1095,1,0,0,0,1095,1097, - 1,0,0,0,1096,1094,1,0,0,0,1097,1098,5,151,0,0,1098,1099,5,124,0, - 0,1099,1122,1,0,0,0,1100,1101,5,132,0,0,1101,1105,3,160,80,0,1102, - 1104,3,126,63,0,1103,1102,1,0,0,0,1104,1107,1,0,0,0,1105,1103,1, - 0,0,0,1105,1106,1,0,0,0,1106,1108,1,0,0,0,1107,1105,1,0,0,0,1108, - 1114,5,124,0,0,1109,1115,3,124,62,0,1110,1111,5,128,0,0,1111,1112, - 3,116,58,0,1112,1113,5,147,0,0,1113,1115,1,0,0,0,1114,1109,1,0,0, - 0,1114,1110,1,0,0,0,1114,1115,1,0,0,0,1115,1116,1,0,0,0,1116,1117, - 5,132,0,0,1117,1118,5,151,0,0,1118,1119,3,160,80,0,1119,1120,5,124, - 0,0,1120,1122,1,0,0,0,1121,1089,1,0,0,0,1121,1100,1,0,0,0,1122,125, - 1,0,0,0,1123,1124,3,160,80,0,1124,1125,5,122,0,0,1125,1126,3,166, - 83,0,1126,1135,1,0,0,0,1127,1128,3,160,80,0,1128,1129,5,122,0,0, - 
1129,1130,5,128,0,0,1130,1131,3,116,58,0,1131,1132,5,147,0,0,1132, - 1135,1,0,0,0,1133,1135,3,160,80,0,1134,1123,1,0,0,0,1134,1127,1, - 0,0,0,1134,1133,1,0,0,0,1135,127,1,0,0,0,1136,1141,3,130,65,0,1137, - 1138,5,116,0,0,1138,1140,3,130,65,0,1139,1137,1,0,0,0,1140,1143, - 1,0,0,0,1141,1139,1,0,0,0,1141,1142,1,0,0,0,1142,1145,1,0,0,0,1143, - 1141,1,0,0,0,1144,1146,5,116,0,0,1145,1144,1,0,0,0,1145,1146,1,0, - 0,0,1146,129,1,0,0,0,1147,1148,3,160,80,0,1148,1149,5,6,0,0,1149, - 1150,5,130,0,0,1150,1151,3,44,22,0,1151,1152,5,149,0,0,1152,1158, - 1,0,0,0,1153,1154,3,116,58,0,1154,1155,5,6,0,0,1155,1156,3,160,80, - 0,1156,1158,1,0,0,0,1157,1147,1,0,0,0,1157,1153,1,0,0,0,1158,131, - 1,0,0,0,1159,1167,3,164,82,0,1160,1161,3,140,70,0,1161,1162,5,120, - 0,0,1162,1164,1,0,0,0,1163,1160,1,0,0,0,1163,1164,1,0,0,0,1164,1165, - 1,0,0,0,1165,1167,3,134,67,0,1166,1159,1,0,0,0,1166,1163,1,0,0,0, - 1167,133,1,0,0,0,1168,1173,3,160,80,0,1169,1170,5,120,0,0,1170,1172, - 3,160,80,0,1171,1169,1,0,0,0,1172,1175,1,0,0,0,1173,1171,1,0,0,0, - 1173,1174,1,0,0,0,1174,135,1,0,0,0,1175,1173,1,0,0,0,1176,1177,6, - 68,-1,0,1177,1186,3,140,70,0,1178,1186,3,138,69,0,1179,1180,5,130, - 0,0,1180,1181,3,44,22,0,1181,1182,5,149,0,0,1182,1186,1,0,0,0,1183, - 1186,3,124,62,0,1184,1186,3,164,82,0,1185,1176,1,0,0,0,1185,1178, - 1,0,0,0,1185,1179,1,0,0,0,1185,1183,1,0,0,0,1185,1184,1,0,0,0,1186, - 1195,1,0,0,0,1187,1191,10,3,0,0,1188,1192,3,158,79,0,1189,1190,5, - 6,0,0,1190,1192,3,160,80,0,1191,1188,1,0,0,0,1191,1189,1,0,0,0,1192, - 1194,1,0,0,0,1193,1187,1,0,0,0,1194,1197,1,0,0,0,1195,1193,1,0,0, - 0,1195,1196,1,0,0,0,1196,137,1,0,0,0,1197,1195,1,0,0,0,1198,1199, - 3,160,80,0,1199,1201,5,130,0,0,1200,1202,3,142,71,0,1201,1200,1, - 0,0,0,1201,1202,1,0,0,0,1202,1203,1,0,0,0,1203,1204,5,149,0,0,1204, - 139,1,0,0,0,1205,1206,3,144,72,0,1206,1207,5,120,0,0,1207,1209,1, - 0,0,0,1208,1205,1,0,0,0,1208,1209,1,0,0,0,1209,1210,1,0,0,0,1210, - 1211,3,160,80,0,1211,141,1,0,0,0,1212,1217,3,116,58,0,1213,1214, - 5,116,0,0,1214,1216,3,116,58,0,1215,1213,1,0,0,0,1216,1219,1,0,0, - 0,1217,1215,1,0,0,0,1217,1218,1,0,0,0,1218,1221,1,0,0,0,1219,1217, - 1,0,0,0,1220,1222,5,116,0,0,1221,1220,1,0,0,0,1221,1222,1,0,0,0, - 1222,143,1,0,0,0,1223,1224,3,160,80,0,1224,145,1,0,0,0,1225,1234, - 5,106,0,0,1226,1227,5,120,0,0,1227,1234,7,11,0,0,1228,1229,5,108, - 0,0,1229,1231,5,120,0,0,1230,1232,7,11,0,0,1231,1230,1,0,0,0,1231, - 1232,1,0,0,0,1232,1234,1,0,0,0,1233,1225,1,0,0,0,1233,1226,1,0,0, - 0,1233,1228,1,0,0,0,1234,147,1,0,0,0,1235,1237,7,12,0,0,1236,1235, - 1,0,0,0,1236,1237,1,0,0,0,1237,1244,1,0,0,0,1238,1245,3,146,73,0, - 1239,1245,5,107,0,0,1240,1245,5,108,0,0,1241,1245,5,109,0,0,1242, - 1245,5,43,0,0,1243,1245,5,57,0,0,1244,1238,1,0,0,0,1244,1239,1,0, - 0,0,1244,1240,1,0,0,0,1244,1241,1,0,0,0,1244,1242,1,0,0,0,1244,1243, - 1,0,0,0,1245,149,1,0,0,0,1246,1250,3,148,74,0,1247,1250,5,110,0, - 0,1248,1250,5,59,0,0,1249,1246,1,0,0,0,1249,1247,1,0,0,0,1249,1248, - 1,0,0,0,1250,151,1,0,0,0,1251,1252,7,13,0,0,1252,153,1,0,0,0,1253, - 1254,7,14,0,0,1254,155,1,0,0,0,1255,1256,7,15,0,0,1256,157,1,0,0, - 0,1257,1260,5,105,0,0,1258,1260,3,156,78,0,1259,1257,1,0,0,0,1259, - 1258,1,0,0,0,1260,159,1,0,0,0,1261,1265,5,105,0,0,1262,1265,3,152, - 76,0,1263,1265,3,154,77,0,1264,1261,1,0,0,0,1264,1262,1,0,0,0,1264, - 1263,1,0,0,0,1265,161,1,0,0,0,1266,1267,3,166,83,0,1267,1268,5,122, - 0,0,1268,1269,3,148,74,0,1269,163,1,0,0,0,1270,1271,5,128,0,0,1271, - 1272,3,134,67,0,1272,1273,5,147,0,0,1273,165,1,0,0,0,1274,1277,5, - 
110,0,0,1275,1277,3,168,84,0,1276,1274,1,0,0,0,1276,1275,1,0,0,0, - 1277,167,1,0,0,0,1278,1282,5,142,0,0,1279,1281,3,170,85,0,1280,1279, - 1,0,0,0,1281,1284,1,0,0,0,1282,1280,1,0,0,0,1282,1283,1,0,0,0,1283, - 1285,1,0,0,0,1284,1282,1,0,0,0,1285,1286,5,144,0,0,1286,169,1,0, - 0,0,1287,1288,5,157,0,0,1288,1289,3,116,58,0,1289,1290,5,147,0,0, - 1290,1293,1,0,0,0,1291,1293,5,156,0,0,1292,1287,1,0,0,0,1292,1291, - 1,0,0,0,1293,171,1,0,0,0,1294,1298,5,143,0,0,1295,1297,3,174,87, - 0,1296,1295,1,0,0,0,1297,1300,1,0,0,0,1298,1296,1,0,0,0,1298,1299, - 1,0,0,0,1299,1301,1,0,0,0,1300,1298,1,0,0,0,1301,1302,5,0,0,1,1302, - 173,1,0,0,0,1303,1304,5,159,0,0,1304,1305,3,116,58,0,1305,1306,5, - 147,0,0,1306,1309,1,0,0,0,1307,1309,5,158,0,0,1308,1303,1,0,0,0, - 1308,1307,1,0,0,0,1309,175,1,0,0,0,168,179,186,195,202,206,220,224, - 227,231,234,241,245,254,259,268,276,283,287,293,298,306,313,319, - 331,339,353,357,362,372,381,384,388,391,395,398,401,404,407,411, - 415,418,421,424,428,431,440,446,467,484,501,507,513,524,526,537, - 540,546,554,560,562,566,571,574,577,581,585,588,590,593,597,601, - 604,606,608,613,624,630,637,642,646,650,656,658,665,673,676,679, - 698,712,728,732,743,747,758,762,769,773,780,784,789,798,802,826, - 843,849,852,855,865,871,874,877,885,888,892,895,909,926,931,935, - 941,948,960,964,967,976,990,1029,1037,1039,1041,1049,1053,1057,1065, - 1069,1078,1082,1084,1094,1105,1114,1121,1134,1141,1145,1157,1163, - 1166,1173,1185,1191,1195,1201,1208,1217,1221,1231,1233,1236,1244, - 1249,1259,1264,1276,1282,1292,1298,1308 + 1,0,0,0,598,600,1,0,0,0,599,601,7,5,0,0,600,599,1,0,0,0,600,601, + 1,0,0,0,601,603,1,0,0,0,602,589,1,0,0,0,602,595,1,0,0,0,603,605, + 1,0,0,0,604,570,1,0,0,0,604,586,1,0,0,0,604,602,1,0,0,0,605,81,1, + 0,0,0,606,607,5,17,0,0,607,610,5,47,0,0,608,610,5,116,0,0,609,606, + 1,0,0,0,609,608,1,0,0,0,610,83,1,0,0,0,611,612,5,62,0,0,612,621, + 3,114,57,0,613,614,5,96,0,0,614,615,5,130,0,0,615,616,3,114,57,0, + 616,617,5,149,0,0,617,621,1,0,0,0,618,619,5,96,0,0,619,621,3,114, + 57,0,620,611,1,0,0,0,620,613,1,0,0,0,620,618,1,0,0,0,621,85,1,0, + 0,0,622,623,5,77,0,0,623,626,3,92,46,0,624,625,5,61,0,0,625,627, + 3,92,46,0,626,624,1,0,0,0,626,627,1,0,0,0,627,87,1,0,0,0,628,633, + 3,90,45,0,629,630,5,116,0,0,630,632,3,90,45,0,631,629,1,0,0,0,632, + 635,1,0,0,0,633,631,1,0,0,0,633,634,1,0,0,0,634,89,1,0,0,0,635,633, + 1,0,0,0,636,638,3,116,58,0,637,639,7,6,0,0,638,637,1,0,0,0,638,639, + 1,0,0,0,639,642,1,0,0,0,640,641,5,60,0,0,641,643,7,7,0,0,642,640, + 1,0,0,0,642,643,1,0,0,0,643,646,1,0,0,0,644,645,5,16,0,0,645,647, + 5,110,0,0,646,644,1,0,0,0,646,647,1,0,0,0,647,91,1,0,0,0,648,655, + 3,160,80,0,649,652,3,144,72,0,650,651,5,151,0,0,651,653,3,144,72, + 0,652,650,1,0,0,0,652,653,1,0,0,0,653,655,1,0,0,0,654,648,1,0,0, + 0,654,649,1,0,0,0,655,93,1,0,0,0,656,661,3,96,48,0,657,658,5,116, + 0,0,658,660,3,96,48,0,659,657,1,0,0,0,660,663,1,0,0,0,661,659,1, + 0,0,0,661,662,1,0,0,0,662,95,1,0,0,0,663,661,1,0,0,0,664,665,3,156, + 78,0,665,666,5,122,0,0,666,667,3,146,73,0,667,97,1,0,0,0,668,670, + 3,100,50,0,669,668,1,0,0,0,669,670,1,0,0,0,670,672,1,0,0,0,671,673, + 3,102,51,0,672,671,1,0,0,0,672,673,1,0,0,0,673,675,1,0,0,0,674,676, + 3,104,52,0,675,674,1,0,0,0,675,676,1,0,0,0,676,99,1,0,0,0,677,678, + 5,67,0,0,678,679,5,11,0,0,679,680,3,114,57,0,680,101,1,0,0,0,681, + 682,5,64,0,0,682,683,5,11,0,0,683,684,3,88,44,0,684,103,1,0,0,0, + 685,686,7,8,0,0,686,687,3,106,53,0,687,105,1,0,0,0,688,695,3,108, + 54,0,689,690,5,9,0,0,690,691,3,108,54,0,691,692,5,2,0,0,692,693, + 
3,108,54,0,693,695,1,0,0,0,694,688,1,0,0,0,694,689,1,0,0,0,695,107, + 1,0,0,0,696,697,5,19,0,0,697,709,5,75,0,0,698,699,5,94,0,0,699,709, + 5,68,0,0,700,701,5,94,0,0,701,709,5,32,0,0,702,703,3,144,72,0,703, + 704,5,68,0,0,704,709,1,0,0,0,705,706,3,144,72,0,706,707,5,32,0,0, + 707,709,1,0,0,0,708,696,1,0,0,0,708,698,1,0,0,0,708,700,1,0,0,0, + 708,702,1,0,0,0,708,705,1,0,0,0,709,109,1,0,0,0,710,711,3,116,58, + 0,711,712,5,0,0,1,712,111,1,0,0,0,713,770,3,156,78,0,714,715,3,156, + 78,0,715,716,5,130,0,0,716,717,3,156,78,0,717,724,3,112,56,0,718, + 719,5,116,0,0,719,720,3,156,78,0,720,721,3,112,56,0,721,723,1,0, + 0,0,722,718,1,0,0,0,723,726,1,0,0,0,724,722,1,0,0,0,724,725,1,0, + 0,0,725,728,1,0,0,0,726,724,1,0,0,0,727,729,5,116,0,0,728,727,1, + 0,0,0,728,729,1,0,0,0,729,730,1,0,0,0,730,731,5,149,0,0,731,770, + 1,0,0,0,732,733,3,156,78,0,733,734,5,130,0,0,734,739,3,158,79,0, + 735,736,5,116,0,0,736,738,3,158,79,0,737,735,1,0,0,0,738,741,1,0, + 0,0,739,737,1,0,0,0,739,740,1,0,0,0,740,743,1,0,0,0,741,739,1,0, + 0,0,742,744,5,116,0,0,743,742,1,0,0,0,743,744,1,0,0,0,744,745,1, + 0,0,0,745,746,5,149,0,0,746,770,1,0,0,0,747,748,3,156,78,0,748,749, + 5,130,0,0,749,754,3,112,56,0,750,751,5,116,0,0,751,753,3,112,56, + 0,752,750,1,0,0,0,753,756,1,0,0,0,754,752,1,0,0,0,754,755,1,0,0, + 0,755,758,1,0,0,0,756,754,1,0,0,0,757,759,5,116,0,0,758,757,1,0, + 0,0,758,759,1,0,0,0,759,760,1,0,0,0,760,761,5,149,0,0,761,770,1, + 0,0,0,762,763,3,156,78,0,763,765,5,130,0,0,764,766,3,114,57,0,765, + 764,1,0,0,0,765,766,1,0,0,0,766,767,1,0,0,0,767,768,5,149,0,0,768, + 770,1,0,0,0,769,713,1,0,0,0,769,714,1,0,0,0,769,732,1,0,0,0,769, + 747,1,0,0,0,769,762,1,0,0,0,770,113,1,0,0,0,771,776,3,116,58,0,772, + 773,5,116,0,0,773,775,3,116,58,0,774,772,1,0,0,0,775,778,1,0,0,0, + 776,774,1,0,0,0,776,777,1,0,0,0,777,780,1,0,0,0,778,776,1,0,0,0, + 779,781,5,116,0,0,780,779,1,0,0,0,780,781,1,0,0,0,781,115,1,0,0, + 0,782,783,6,58,-1,0,783,785,5,12,0,0,784,786,3,116,58,0,785,784, + 1,0,0,0,785,786,1,0,0,0,786,792,1,0,0,0,787,788,5,98,0,0,788,789, + 3,116,58,0,789,790,5,83,0,0,790,791,3,116,58,0,791,793,1,0,0,0,792, + 787,1,0,0,0,793,794,1,0,0,0,794,792,1,0,0,0,794,795,1,0,0,0,795, + 798,1,0,0,0,796,797,5,25,0,0,797,799,3,116,58,0,798,796,1,0,0,0, + 798,799,1,0,0,0,799,800,1,0,0,0,800,801,5,26,0,0,801,933,1,0,0,0, + 802,803,5,13,0,0,803,804,5,130,0,0,804,805,3,116,58,0,805,806,5, + 6,0,0,806,807,3,112,56,0,807,808,5,149,0,0,808,933,1,0,0,0,809,810, + 5,20,0,0,810,933,5,110,0,0,811,812,5,45,0,0,812,813,3,116,58,0,813, + 814,3,148,74,0,814,933,1,0,0,0,815,816,5,82,0,0,816,817,5,130,0, + 0,817,818,3,116,58,0,818,819,5,34,0,0,819,822,3,116,58,0,820,821, + 5,33,0,0,821,823,3,116,58,0,822,820,1,0,0,0,822,823,1,0,0,0,823, + 824,1,0,0,0,824,825,5,149,0,0,825,933,1,0,0,0,826,827,5,86,0,0,827, + 933,5,110,0,0,828,829,5,91,0,0,829,830,5,130,0,0,830,831,7,9,0,0, + 831,832,3,162,81,0,832,833,5,34,0,0,833,834,3,116,58,0,834,835,5, + 149,0,0,835,933,1,0,0,0,836,837,3,156,78,0,837,839,5,130,0,0,838, + 840,3,114,57,0,839,838,1,0,0,0,839,840,1,0,0,0,840,841,1,0,0,0,841, + 842,5,149,0,0,842,851,1,0,0,0,843,845,5,130,0,0,844,846,5,24,0,0, + 845,844,1,0,0,0,845,846,1,0,0,0,846,848,1,0,0,0,847,849,3,114,57, + 0,848,847,1,0,0,0,848,849,1,0,0,0,849,850,1,0,0,0,850,852,5,149, + 0,0,851,843,1,0,0,0,851,852,1,0,0,0,852,853,1,0,0,0,853,854,5,66, + 0,0,854,855,5,130,0,0,855,856,3,98,49,0,856,857,5,149,0,0,857,933, + 1,0,0,0,858,859,3,156,78,0,859,861,5,130,0,0,860,862,3,114,57,0, + 861,860,1,0,0,0,861,862,1,0,0,0,862,863,1,0,0,0,863,864,5,149,0, + 
0,864,873,1,0,0,0,865,867,5,130,0,0,866,868,5,24,0,0,867,866,1,0, + 0,0,867,868,1,0,0,0,868,870,1,0,0,0,869,871,3,114,57,0,870,869,1, + 0,0,0,870,871,1,0,0,0,871,872,1,0,0,0,872,874,5,149,0,0,873,865, + 1,0,0,0,873,874,1,0,0,0,874,875,1,0,0,0,875,876,5,66,0,0,876,877, + 3,156,78,0,877,933,1,0,0,0,878,884,3,156,78,0,879,881,5,130,0,0, + 880,882,3,114,57,0,881,880,1,0,0,0,881,882,1,0,0,0,882,883,1,0,0, + 0,883,885,5,149,0,0,884,879,1,0,0,0,884,885,1,0,0,0,885,886,1,0, + 0,0,886,888,5,130,0,0,887,889,5,24,0,0,888,887,1,0,0,0,888,889,1, + 0,0,0,889,891,1,0,0,0,890,892,3,114,57,0,891,890,1,0,0,0,891,892, + 1,0,0,0,892,893,1,0,0,0,893,894,5,149,0,0,894,933,1,0,0,0,895,933, + 3,120,60,0,896,933,3,164,82,0,897,933,3,146,73,0,898,899,5,118,0, + 0,899,933,3,116,58,20,900,901,5,58,0,0,901,933,3,116,58,14,902,903, + 3,136,68,0,903,904,5,120,0,0,904,906,1,0,0,0,905,902,1,0,0,0,905, + 906,1,0,0,0,906,907,1,0,0,0,907,933,5,112,0,0,908,909,5,130,0,0, + 909,910,3,44,22,0,910,911,5,149,0,0,911,933,1,0,0,0,912,913,5,130, + 0,0,913,914,3,116,58,0,914,915,5,149,0,0,915,933,1,0,0,0,916,917, + 5,130,0,0,917,918,3,114,57,0,918,919,5,149,0,0,919,933,1,0,0,0,920, + 922,5,129,0,0,921,923,3,114,57,0,922,921,1,0,0,0,922,923,1,0,0,0, + 923,924,1,0,0,0,924,933,5,148,0,0,925,927,5,128,0,0,926,928,3,40, + 20,0,927,926,1,0,0,0,927,928,1,0,0,0,928,929,1,0,0,0,929,933,5,147, + 0,0,930,933,3,118,59,0,931,933,3,128,64,0,932,782,1,0,0,0,932,802, + 1,0,0,0,932,809,1,0,0,0,932,811,1,0,0,0,932,815,1,0,0,0,932,826, + 1,0,0,0,932,828,1,0,0,0,932,836,1,0,0,0,932,858,1,0,0,0,932,878, + 1,0,0,0,932,895,1,0,0,0,932,896,1,0,0,0,932,897,1,0,0,0,932,898, + 1,0,0,0,932,900,1,0,0,0,932,905,1,0,0,0,932,908,1,0,0,0,932,912, + 1,0,0,0,932,916,1,0,0,0,932,920,1,0,0,0,932,925,1,0,0,0,932,930, + 1,0,0,0,932,931,1,0,0,0,933,1044,1,0,0,0,934,938,10,19,0,0,935,939, + 5,112,0,0,936,939,5,151,0,0,937,939,5,138,0,0,938,935,1,0,0,0,938, + 936,1,0,0,0,938,937,1,0,0,0,939,940,1,0,0,0,940,1043,3,116,58,20, + 941,945,10,18,0,0,942,946,5,139,0,0,943,946,5,118,0,0,944,946,5, + 117,0,0,945,942,1,0,0,0,945,943,1,0,0,0,945,944,1,0,0,0,946,947, + 1,0,0,0,947,1043,3,116,58,19,948,973,10,17,0,0,949,974,5,121,0,0, + 950,974,5,122,0,0,951,974,5,133,0,0,952,974,5,131,0,0,953,974,5, + 132,0,0,954,974,5,123,0,0,955,974,5,124,0,0,956,958,5,58,0,0,957, + 956,1,0,0,0,957,958,1,0,0,0,958,959,1,0,0,0,959,961,5,42,0,0,960, + 962,5,15,0,0,961,960,1,0,0,0,961,962,1,0,0,0,962,974,1,0,0,0,963, + 965,5,58,0,0,964,963,1,0,0,0,964,965,1,0,0,0,965,966,1,0,0,0,966, + 974,7,10,0,0,967,974,5,145,0,0,968,974,5,146,0,0,969,974,5,135,0, + 0,970,974,5,126,0,0,971,974,5,127,0,0,972,974,5,134,0,0,973,949, + 1,0,0,0,973,950,1,0,0,0,973,951,1,0,0,0,973,952,1,0,0,0,973,953, + 1,0,0,0,973,954,1,0,0,0,973,955,1,0,0,0,973,957,1,0,0,0,973,964, + 1,0,0,0,973,967,1,0,0,0,973,968,1,0,0,0,973,969,1,0,0,0,973,970, + 1,0,0,0,973,971,1,0,0,0,973,972,1,0,0,0,974,975,1,0,0,0,975,1043, + 3,116,58,18,976,977,10,15,0,0,977,978,5,137,0,0,978,1043,3,116,58, + 16,979,980,10,13,0,0,980,981,5,2,0,0,981,1043,3,116,58,14,982,983, + 10,12,0,0,983,984,5,63,0,0,984,1043,3,116,58,13,985,987,10,11,0, + 0,986,988,5,58,0,0,987,986,1,0,0,0,987,988,1,0,0,0,988,989,1,0,0, + 0,989,990,5,9,0,0,990,991,3,116,58,0,991,992,5,2,0,0,992,993,3,116, + 58,12,993,1043,1,0,0,0,994,995,10,10,0,0,995,996,5,140,0,0,996,997, + 3,116,58,0,997,998,5,115,0,0,998,999,3,116,58,10,999,1043,1,0,0, + 0,1000,1001,10,30,0,0,1001,1003,5,130,0,0,1002,1004,3,114,57,0,1003, + 1002,1,0,0,0,1003,1004,1,0,0,0,1004,1005,1,0,0,0,1005,1043,5,149, + 
0,0,1006,1007,10,26,0,0,1007,1008,5,129,0,0,1008,1009,3,116,58,0, + 1009,1010,5,148,0,0,1010,1043,1,0,0,0,1011,1012,10,25,0,0,1012,1013, + 5,120,0,0,1013,1043,5,108,0,0,1014,1015,10,24,0,0,1015,1016,5,120, + 0,0,1016,1043,3,156,78,0,1017,1018,10,23,0,0,1018,1019,5,136,0,0, + 1019,1020,5,129,0,0,1020,1021,3,116,58,0,1021,1022,5,148,0,0,1022, + 1043,1,0,0,0,1023,1024,10,22,0,0,1024,1025,5,136,0,0,1025,1043,5, + 108,0,0,1026,1027,10,21,0,0,1027,1028,5,136,0,0,1028,1043,3,156, + 78,0,1029,1030,10,16,0,0,1030,1032,5,46,0,0,1031,1033,5,58,0,0,1032, + 1031,1,0,0,0,1032,1033,1,0,0,0,1033,1034,1,0,0,0,1034,1043,5,59, + 0,0,1035,1040,10,9,0,0,1036,1037,5,6,0,0,1037,1041,3,156,78,0,1038, + 1039,5,6,0,0,1039,1041,5,110,0,0,1040,1036,1,0,0,0,1040,1038,1,0, + 0,0,1041,1043,1,0,0,0,1042,934,1,0,0,0,1042,941,1,0,0,0,1042,948, + 1,0,0,0,1042,976,1,0,0,0,1042,979,1,0,0,0,1042,982,1,0,0,0,1042, + 985,1,0,0,0,1042,994,1,0,0,0,1042,1000,1,0,0,0,1042,1006,1,0,0,0, + 1042,1011,1,0,0,0,1042,1014,1,0,0,0,1042,1017,1,0,0,0,1042,1023, + 1,0,0,0,1042,1026,1,0,0,0,1042,1029,1,0,0,0,1042,1035,1,0,0,0,1043, + 1046,1,0,0,0,1044,1042,1,0,0,0,1044,1045,1,0,0,0,1045,117,1,0,0, + 0,1046,1044,1,0,0,0,1047,1048,5,130,0,0,1048,1053,3,156,78,0,1049, + 1050,5,116,0,0,1050,1052,3,156,78,0,1051,1049,1,0,0,0,1052,1055, + 1,0,0,0,1053,1051,1,0,0,0,1053,1054,1,0,0,0,1054,1057,1,0,0,0,1055, + 1053,1,0,0,0,1056,1058,5,116,0,0,1057,1056,1,0,0,0,1057,1058,1,0, + 0,0,1058,1059,1,0,0,0,1059,1060,5,149,0,0,1060,1075,1,0,0,0,1061, + 1066,3,156,78,0,1062,1063,5,116,0,0,1063,1065,3,156,78,0,1064,1062, + 1,0,0,0,1065,1068,1,0,0,0,1066,1064,1,0,0,0,1066,1067,1,0,0,0,1067, + 1070,1,0,0,0,1068,1066,1,0,0,0,1069,1071,5,116,0,0,1070,1069,1,0, + 0,0,1070,1071,1,0,0,0,1071,1075,1,0,0,0,1072,1073,5,130,0,0,1073, + 1075,5,149,0,0,1074,1047,1,0,0,0,1074,1061,1,0,0,0,1074,1072,1,0, + 0,0,1075,1076,1,0,0,0,1076,1079,5,111,0,0,1077,1080,3,116,58,0,1078, + 1080,3,36,18,0,1079,1077,1,0,0,0,1079,1078,1,0,0,0,1080,119,1,0, + 0,0,1081,1082,5,132,0,0,1082,1086,3,156,78,0,1083,1085,3,122,61, + 0,1084,1083,1,0,0,0,1085,1088,1,0,0,0,1086,1084,1,0,0,0,1086,1087, + 1,0,0,0,1087,1089,1,0,0,0,1088,1086,1,0,0,0,1089,1090,5,151,0,0, + 1090,1091,5,124,0,0,1091,1114,1,0,0,0,1092,1093,5,132,0,0,1093,1097, + 3,156,78,0,1094,1096,3,122,61,0,1095,1094,1,0,0,0,1096,1099,1,0, + 0,0,1097,1095,1,0,0,0,1097,1098,1,0,0,0,1098,1100,1,0,0,0,1099,1097, + 1,0,0,0,1100,1106,5,124,0,0,1101,1107,3,120,60,0,1102,1103,5,128, + 0,0,1103,1104,3,116,58,0,1104,1105,5,147,0,0,1105,1107,1,0,0,0,1106, + 1101,1,0,0,0,1106,1102,1,0,0,0,1106,1107,1,0,0,0,1107,1108,1,0,0, + 0,1108,1109,5,132,0,0,1109,1110,5,151,0,0,1110,1111,3,156,78,0,1111, + 1112,5,124,0,0,1112,1114,1,0,0,0,1113,1081,1,0,0,0,1113,1092,1,0, + 0,0,1114,121,1,0,0,0,1115,1116,3,156,78,0,1116,1117,5,122,0,0,1117, + 1118,3,162,81,0,1118,1127,1,0,0,0,1119,1120,3,156,78,0,1120,1121, + 5,122,0,0,1121,1122,5,128,0,0,1122,1123,3,116,58,0,1123,1124,5,147, + 0,0,1124,1127,1,0,0,0,1125,1127,3,156,78,0,1126,1115,1,0,0,0,1126, + 1119,1,0,0,0,1126,1125,1,0,0,0,1127,123,1,0,0,0,1128,1133,3,126, + 63,0,1129,1130,5,116,0,0,1130,1132,3,126,63,0,1131,1129,1,0,0,0, + 1132,1135,1,0,0,0,1133,1131,1,0,0,0,1133,1134,1,0,0,0,1134,1137, + 1,0,0,0,1135,1133,1,0,0,0,1136,1138,5,116,0,0,1137,1136,1,0,0,0, + 1137,1138,1,0,0,0,1138,125,1,0,0,0,1139,1140,3,156,78,0,1140,1141, + 5,6,0,0,1141,1142,5,130,0,0,1142,1143,3,44,22,0,1143,1144,5,149, + 0,0,1144,1150,1,0,0,0,1145,1146,3,116,58,0,1146,1147,5,6,0,0,1147, + 
1148,3,156,78,0,1148,1150,1,0,0,0,1149,1139,1,0,0,0,1149,1145,1, + 0,0,0,1150,127,1,0,0,0,1151,1159,3,160,80,0,1152,1153,3,136,68,0, + 1153,1154,5,120,0,0,1154,1156,1,0,0,0,1155,1152,1,0,0,0,1155,1156, + 1,0,0,0,1156,1157,1,0,0,0,1157,1159,3,130,65,0,1158,1151,1,0,0,0, + 1158,1155,1,0,0,0,1159,129,1,0,0,0,1160,1165,3,156,78,0,1161,1162, + 5,120,0,0,1162,1164,3,156,78,0,1163,1161,1,0,0,0,1164,1167,1,0,0, + 0,1165,1163,1,0,0,0,1165,1166,1,0,0,0,1166,131,1,0,0,0,1167,1165, + 1,0,0,0,1168,1169,6,66,-1,0,1169,1178,3,136,68,0,1170,1178,3,134, + 67,0,1171,1172,5,130,0,0,1172,1173,3,44,22,0,1173,1174,5,149,0,0, + 1174,1178,1,0,0,0,1175,1178,3,120,60,0,1176,1178,3,160,80,0,1177, + 1168,1,0,0,0,1177,1170,1,0,0,0,1177,1171,1,0,0,0,1177,1175,1,0,0, + 0,1177,1176,1,0,0,0,1178,1187,1,0,0,0,1179,1183,10,3,0,0,1180,1184, + 3,154,77,0,1181,1182,5,6,0,0,1182,1184,3,156,78,0,1183,1180,1,0, + 0,0,1183,1181,1,0,0,0,1184,1186,1,0,0,0,1185,1179,1,0,0,0,1186,1189, + 1,0,0,0,1187,1185,1,0,0,0,1187,1188,1,0,0,0,1188,133,1,0,0,0,1189, + 1187,1,0,0,0,1190,1191,3,156,78,0,1191,1193,5,130,0,0,1192,1194, + 3,138,69,0,1193,1192,1,0,0,0,1193,1194,1,0,0,0,1194,1195,1,0,0,0, + 1195,1196,5,149,0,0,1196,135,1,0,0,0,1197,1198,3,140,70,0,1198,1199, + 5,120,0,0,1199,1201,1,0,0,0,1200,1197,1,0,0,0,1200,1201,1,0,0,0, + 1201,1202,1,0,0,0,1202,1203,3,156,78,0,1203,137,1,0,0,0,1204,1209, + 3,116,58,0,1205,1206,5,116,0,0,1206,1208,3,116,58,0,1207,1205,1, + 0,0,0,1208,1211,1,0,0,0,1209,1207,1,0,0,0,1209,1210,1,0,0,0,1210, + 1213,1,0,0,0,1211,1209,1,0,0,0,1212,1214,5,116,0,0,1213,1212,1,0, + 0,0,1213,1214,1,0,0,0,1214,139,1,0,0,0,1215,1216,3,156,78,0,1216, + 141,1,0,0,0,1217,1226,5,106,0,0,1218,1219,5,120,0,0,1219,1226,7, + 11,0,0,1220,1221,5,108,0,0,1221,1223,5,120,0,0,1222,1224,7,11,0, + 0,1223,1222,1,0,0,0,1223,1224,1,0,0,0,1224,1226,1,0,0,0,1225,1217, + 1,0,0,0,1225,1218,1,0,0,0,1225,1220,1,0,0,0,1226,143,1,0,0,0,1227, + 1229,7,12,0,0,1228,1227,1,0,0,0,1228,1229,1,0,0,0,1229,1236,1,0, + 0,0,1230,1237,3,142,71,0,1231,1237,5,107,0,0,1232,1237,5,108,0,0, + 1233,1237,5,109,0,0,1234,1237,5,43,0,0,1235,1237,5,57,0,0,1236,1230, + 1,0,0,0,1236,1231,1,0,0,0,1236,1232,1,0,0,0,1236,1233,1,0,0,0,1236, + 1234,1,0,0,0,1236,1235,1,0,0,0,1237,145,1,0,0,0,1238,1242,3,144, + 72,0,1239,1242,5,110,0,0,1240,1242,5,59,0,0,1241,1238,1,0,0,0,1241, + 1239,1,0,0,0,1241,1240,1,0,0,0,1242,147,1,0,0,0,1243,1244,7,13,0, + 0,1244,149,1,0,0,0,1245,1246,7,14,0,0,1246,151,1,0,0,0,1247,1248, + 7,15,0,0,1248,153,1,0,0,0,1249,1252,5,105,0,0,1250,1252,3,152,76, + 0,1251,1249,1,0,0,0,1251,1250,1,0,0,0,1252,155,1,0,0,0,1253,1257, + 5,105,0,0,1254,1257,3,148,74,0,1255,1257,3,150,75,0,1256,1253,1, + 0,0,0,1256,1254,1,0,0,0,1256,1255,1,0,0,0,1257,157,1,0,0,0,1258, + 1259,3,162,81,0,1259,1260,5,122,0,0,1260,1261,3,144,72,0,1261,159, + 1,0,0,0,1262,1263,5,128,0,0,1263,1264,3,130,65,0,1264,1265,5,147, + 0,0,1265,161,1,0,0,0,1266,1269,5,110,0,0,1267,1269,3,164,82,0,1268, + 1266,1,0,0,0,1268,1267,1,0,0,0,1269,163,1,0,0,0,1270,1274,5,142, + 0,0,1271,1273,3,166,83,0,1272,1271,1,0,0,0,1273,1276,1,0,0,0,1274, + 1272,1,0,0,0,1274,1275,1,0,0,0,1275,1277,1,0,0,0,1276,1274,1,0,0, + 0,1277,1278,5,144,0,0,1278,165,1,0,0,0,1279,1280,5,157,0,0,1280, + 1281,3,116,58,0,1281,1282,5,147,0,0,1282,1285,1,0,0,0,1283,1285, + 5,156,0,0,1284,1279,1,0,0,0,1284,1283,1,0,0,0,1285,167,1,0,0,0,1286, + 1290,5,143,0,0,1287,1289,3,170,85,0,1288,1287,1,0,0,0,1289,1292, + 1,0,0,0,1290,1288,1,0,0,0,1290,1291,1,0,0,0,1291,1293,1,0,0,0,1292, + 1290,1,0,0,0,1293,1294,5,0,0,1,1294,169,1,0,0,0,1295,1296,5,159, + 
0,0,1296,1297,3,116,58,0,1297,1298,5,147,0,0,1298,1301,1,0,0,0,1299, + 1301,5,158,0,0,1300,1295,1,0,0,0,1300,1299,1,0,0,0,1301,171,1,0, + 0,0,167,175,182,191,198,202,216,220,223,227,230,237,241,250,255, + 264,272,279,283,289,294,302,309,315,327,335,349,353,358,368,377, + 380,384,387,391,394,397,400,403,407,411,414,417,420,424,427,436, + 442,463,480,497,503,509,520,522,533,536,542,550,556,558,562,567, + 570,573,577,581,584,586,589,593,597,600,602,604,609,620,626,633, + 638,642,646,652,654,661,669,672,675,694,708,724,728,739,743,754, + 758,765,769,776,780,785,794,798,822,839,845,848,851,861,867,870, + 873,881,884,888,891,905,922,927,932,938,945,957,961,964,973,987, + 1003,1032,1040,1042,1044,1053,1057,1066,1070,1074,1079,1086,1097, + 1106,1113,1126,1133,1137,1149,1155,1158,1165,1177,1183,1187,1193, + 1200,1209,1213,1223,1225,1228,1236,1241,1251,1256,1268,1274,1284, + 1290,1300 ] class HogQLParser ( Parser ): @@ -675,35 +674,33 @@ class HogQLParser ( Parser ): RULE_columnTypeExpr = 56 RULE_columnExprList = 57 RULE_columnExpr = 58 - RULE_columnArgList = 59 - RULE_columnArgExpr = 60 - RULE_columnLambdaExpr = 61 - RULE_hogqlxTagElement = 62 - RULE_hogqlxTagAttribute = 63 - RULE_withExprList = 64 - RULE_withExpr = 65 - RULE_columnIdentifier = 66 - RULE_nestedIdentifier = 67 - RULE_tableExpr = 68 - RULE_tableFunctionExpr = 69 - RULE_tableIdentifier = 70 - RULE_tableArgList = 71 - RULE_databaseIdentifier = 72 - RULE_floatingLiteral = 73 - RULE_numberLiteral = 74 - RULE_literal = 75 - RULE_interval = 76 - RULE_keyword = 77 - RULE_keywordForAlias = 78 - RULE_alias = 79 - RULE_identifier = 80 - RULE_enumValue = 81 - RULE_placeholder = 82 - RULE_string = 83 - RULE_templateString = 84 - RULE_stringContents = 85 - RULE_fullTemplateString = 86 - RULE_stringContentsFull = 87 + RULE_columnLambdaExpr = 59 + RULE_hogqlxTagElement = 60 + RULE_hogqlxTagAttribute = 61 + RULE_withExprList = 62 + RULE_withExpr = 63 + RULE_columnIdentifier = 64 + RULE_nestedIdentifier = 65 + RULE_tableExpr = 66 + RULE_tableFunctionExpr = 67 + RULE_tableIdentifier = 68 + RULE_tableArgList = 69 + RULE_databaseIdentifier = 70 + RULE_floatingLiteral = 71 + RULE_numberLiteral = 72 + RULE_literal = 73 + RULE_interval = 74 + RULE_keyword = 75 + RULE_keywordForAlias = 76 + RULE_alias = 77 + RULE_identifier = 78 + RULE_enumValue = 79 + RULE_placeholder = 80 + RULE_string = 81 + RULE_templateString = 82 + RULE_stringContents = 83 + RULE_fullTemplateString = 84 + RULE_stringContentsFull = 85 ruleNames = [ "program", "declaration", "expression", "varDecl", "identifierList", "statement", "returnStmt", "throwStmt", "catchBlock", @@ -720,12 +717,11 @@ class HogQLParser ( Parser ): "windowExpr", "winPartitionByClause", "winOrderByClause", "winFrameClause", "winFrameExtend", "winFrameBound", "expr", "columnTypeExpr", "columnExprList", "columnExpr", - "columnArgList", "columnArgExpr", "columnLambdaExpr", - "hogqlxTagElement", "hogqlxTagAttribute", "withExprList", - "withExpr", "columnIdentifier", "nestedIdentifier", "tableExpr", - "tableFunctionExpr", "tableIdentifier", "tableArgList", - "databaseIdentifier", "floatingLiteral", "numberLiteral", - "literal", "interval", "keyword", "keywordForAlias", + "columnLambdaExpr", "hogqlxTagElement", "hogqlxTagAttribute", + "withExprList", "withExpr", "columnIdentifier", "nestedIdentifier", + "tableExpr", "tableFunctionExpr", "tableIdentifier", + "tableArgList", "databaseIdentifier", "floatingLiteral", + "numberLiteral", "literal", "interval", "keyword", "keywordForAlias", "alias", "identifier", "enumValue", 
"placeholder", "string", "templateString", "stringContents", "fullTemplateString", "stringContentsFull" ] @@ -893,7 +889,7 @@ class HogQLParser ( Parser ): def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.13.1") + self.checkVersion("4.13.2") self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) self._predicates = None @@ -936,17 +932,17 @@ def program(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 179 + self.state = 175 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & -536887298) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493105500848127) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 4212759) != 0): - self.state = 176 + self.state = 172 self.declaration() - self.state = 181 + self.state = 177 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 182 + self.state = 178 self.match(HogQLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -989,17 +985,17 @@ def declaration(self): localctx = HogQLParser.DeclarationContext(self, self._ctx, self.state) self.enterRule(localctx, 2, self.RULE_declaration) try: - self.state = 186 + self.state = 182 self._errHandler.sync(self) token = self._input.LA(1) if token in [52]: self.enterOuterAlt(localctx, 1) - self.state = 184 + self.state = 180 self.varDecl() pass elif token in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 105, 106, 107, 108, 109, 110, 112, 118, 120, 128, 129, 130, 132, 139, 142, 150]: self.enterOuterAlt(localctx, 2) - self.state = 185 + self.state = 181 self.statement() pass else: @@ -1043,7 +1039,7 @@ def expression(self): self.enterRule(localctx, 4, self.RULE_expression) try: self.enterOuterAlt(localctx, 1) - self.state = 188 + self.state = 184 self.columnExpr(0) except RecognitionException as re: localctx.exception = re @@ -1097,19 +1093,19 @@ def varDecl(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 190 + self.state = 186 self.match(HogQLParser.LET) - self.state = 191 + self.state = 187 self.identifier() - self.state = 195 + self.state = 191 self._errHandler.sync(self) _la = self._input.LA(1) if _la==115: - self.state = 192 + self.state = 188 self.match(HogQLParser.COLON) - self.state = 193 + self.state = 189 self.match(HogQLParser.EQ_SINGLE) - self.state = 194 + self.state = 190 self.expression() @@ -1161,26 +1157,26 @@ def identifierList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 197 + self.state = 193 self.identifier() - self.state = 202 + self.state = 198 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 198 + self.state = 194 self.match(HogQLParser.COMMA) - self.state = 199 + self.state = 195 self.identifier() - self.state = 204 + self.state = 200 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) - self.state = 206 + self.state = 202 self._errHandler.sync(self) _la = self._input.LA(1) 
if _la==116: - self.state = 205 + self.state = 201 self.match(HogQLParser.COMMA) @@ -1265,78 +1261,78 @@ def statement(self): localctx = HogQLParser.StatementContext(self, self._ctx, self.state) self.enterRule(localctx, 10, self.RULE_statement) try: - self.state = 220 + self.state = 216 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,5,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 208 + self.state = 204 self.returnStmt() pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 209 + self.state = 205 self.throwStmt() pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 210 + self.state = 206 self.tryCatchStmt() pass elif la_ == 4: self.enterOuterAlt(localctx, 4) - self.state = 211 + self.state = 207 self.ifStmt() pass elif la_ == 5: self.enterOuterAlt(localctx, 5) - self.state = 212 + self.state = 208 self.whileStmt() pass elif la_ == 6: self.enterOuterAlt(localctx, 6) - self.state = 213 + self.state = 209 self.forInStmt() pass elif la_ == 7: self.enterOuterAlt(localctx, 7) - self.state = 214 + self.state = 210 self.forStmt() pass elif la_ == 8: self.enterOuterAlt(localctx, 8) - self.state = 215 + self.state = 211 self.funcStmt() pass elif la_ == 9: self.enterOuterAlt(localctx, 9) - self.state = 216 + self.state = 212 self.varAssignment() pass elif la_ == 10: self.enterOuterAlt(localctx, 10) - self.state = 217 + self.state = 213 self.block() pass elif la_ == 11: self.enterOuterAlt(localctx, 11) - self.state = 218 + self.state = 214 self.exprStmt() pass elif la_ == 12: self.enterOuterAlt(localctx, 12) - self.state = 219 + self.state = 215 self.emptyStmt() pass @@ -1385,21 +1381,21 @@ def returnStmt(self): self.enterRule(localctx, 12, self.RULE_returnStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 222 + self.state = 218 self.match(HogQLParser.RETURN) - self.state = 224 + self.state = 220 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,6,self._ctx) if la_ == 1: - self.state = 223 + self.state = 219 self.expression() - self.state = 227 + self.state = 223 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,7,self._ctx) if la_ == 1: - self.state = 226 + self.state = 222 self.match(HogQLParser.SEMICOLON) @@ -1447,21 +1443,21 @@ def throwStmt(self): self.enterRule(localctx, 14, self.RULE_throwStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 229 + self.state = 225 self.match(HogQLParser.THROW) - self.state = 231 + self.state = 227 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,8,self._ctx) if la_ == 1: - self.state = 230 + self.state = 226 self.expression() - self.state = 234 + self.state = 230 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,9,self._ctx) if la_ == 1: - self.state = 233 + self.state = 229 self.match(HogQLParser.SEMICOLON) @@ -1526,31 +1522,31 @@ def catchBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 236 + self.state = 232 self.match(HogQLParser.CATCH) - self.state = 245 + self.state = 241 self._errHandler.sync(self) _la = self._input.LA(1) if _la==130: - self.state = 237 + self.state = 233 self.match(HogQLParser.LPAREN) - self.state = 238 + self.state = 234 localctx.catchVar = self.identifier() - self.state = 241 + self.state = 237 self._errHandler.sync(self) _la = self._input.LA(1) if _la==115: - self.state = 239 + self.state = 235 self.match(HogQLParser.COLON) - self.state = 240 + self.state = 236 localctx.catchType = self.identifier() - 
self.state = 243 + self.state = 239 self.match(HogQLParser.RPAREN) - self.state = 247 + self.state = 243 localctx.catchStmt = self.block() except RecognitionException as re: localctx.exception = re @@ -1609,27 +1605,27 @@ def tryCatchStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 249 + self.state = 245 self.match(HogQLParser.TRY) - self.state = 250 + self.state = 246 localctx.tryStmt = self.block() - self.state = 254 + self.state = 250 self._errHandler.sync(self) _la = self._input.LA(1) while _la==14: - self.state = 251 + self.state = 247 self.catchBlock() - self.state = 256 + self.state = 252 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 259 + self.state = 255 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 257 + self.state = 253 self.match(HogQLParser.FINALLY) - self.state = 258 + self.state = 254 localctx.finallyStmt = self.block() @@ -1690,23 +1686,23 @@ def ifStmt(self): self.enterRule(localctx, 20, self.RULE_ifStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 261 + self.state = 257 self.match(HogQLParser.IF) - self.state = 262 + self.state = 258 self.match(HogQLParser.LPAREN) - self.state = 263 + self.state = 259 self.expression() - self.state = 264 + self.state = 260 self.match(HogQLParser.RPAREN) - self.state = 265 + self.state = 261 self.statement() - self.state = 268 + self.state = 264 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,14,self._ctx) if la_ == 1: - self.state = 266 + self.state = 262 self.match(HogQLParser.ELSE) - self.state = 267 + self.state = 263 self.statement() @@ -1764,21 +1760,21 @@ def whileStmt(self): self.enterRule(localctx, 22, self.RULE_whileStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 270 + self.state = 266 self.match(HogQLParser.WHILE) - self.state = 271 + self.state = 267 self.match(HogQLParser.LPAREN) - self.state = 272 + self.state = 268 self.expression() - self.state = 273 + self.state = 269 self.match(HogQLParser.RPAREN) - self.state = 274 + self.state = 270 self.statement() - self.state = 276 + self.state = 272 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,15,self._ctx) if la_ == 1: - self.state = 275 + self.state = 271 self.match(HogQLParser.SEMICOLON) @@ -1864,63 +1860,63 @@ def forStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 278 + self.state = 274 self.match(HogQLParser.FOR) - self.state = 279 + self.state = 275 self.match(HogQLParser.LPAREN) - self.state = 283 + self.state = 279 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,16,self._ctx) if la_ == 1: - self.state = 280 + self.state = 276 localctx.initializerVarDeclr = self.varDecl() elif la_ == 2: - self.state = 281 + self.state = 277 localctx.initializerVarAssignment = self.varAssignment() elif la_ == 3: - self.state = 282 + self.state = 278 localctx.initializerExpression = self.expression() - self.state = 285 + self.state = 281 self.match(HogQLParser.SEMICOLON) - self.state = 287 + self.state = 283 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 286 + self.state = 282 localctx.condition = self.expression() - self.state = 289 + self.state = 285 self.match(HogQLParser.SEMICOLON) - self.state = 293 + self.state = 
289 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,18,self._ctx) if la_ == 1: - self.state = 290 + self.state = 286 localctx.incrementVarDeclr = self.varDecl() elif la_ == 2: - self.state = 291 + self.state = 287 localctx.incrementVarAssignment = self.varAssignment() elif la_ == 3: - self.state = 292 + self.state = 288 localctx.incrementExpression = self.expression() - self.state = 295 + self.state = 291 self.match(HogQLParser.RPAREN) - self.state = 296 + self.state = 292 self.statement() - self.state = 298 + self.state = 294 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,19,self._ctx) if la_ == 1: - self.state = 297 + self.state = 293 self.match(HogQLParser.SEMICOLON) @@ -1995,37 +1991,37 @@ def forInStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 300 + self.state = 296 self.match(HogQLParser.FOR) - self.state = 301 + self.state = 297 self.match(HogQLParser.LPAREN) - self.state = 302 + self.state = 298 self.match(HogQLParser.LET) - self.state = 303 + self.state = 299 self.identifier() - self.state = 306 + self.state = 302 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 304 + self.state = 300 self.match(HogQLParser.COMMA) - self.state = 305 + self.state = 301 self.identifier() - self.state = 308 + self.state = 304 self.match(HogQLParser.IN) - self.state = 309 + self.state = 305 self.expression() - self.state = 310 + self.state = 306 self.match(HogQLParser.RPAREN) - self.state = 311 + self.state = 307 self.statement() - self.state = 313 + self.state = 309 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,21,self._ctx) if la_ == 1: - self.state = 312 + self.state = 308 self.match(HogQLParser.SEMICOLON) @@ -2085,23 +2081,23 @@ def funcStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 315 + self.state = 311 self.match(HogQLParser.FN) - self.state = 316 + self.state = 312 self.identifier() - self.state = 317 + self.state = 313 self.match(HogQLParser.LPAREN) - self.state = 319 + self.state = 315 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -725088338784043010) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 3229277487103) != 0): - self.state = 318 + self.state = 314 self.identifierList() - self.state = 321 + self.state = 317 self.match(HogQLParser.RPAREN) - self.state = 322 + self.state = 318 self.block() except RecognitionException as re: localctx.exception = re @@ -2150,13 +2146,13 @@ def varAssignment(self): self.enterRule(localctx, 30, self.RULE_varAssignment) try: self.enterOuterAlt(localctx, 1) - self.state = 324 + self.state = 320 self.expression() - self.state = 325 + self.state = 321 self.match(HogQLParser.COLON) - self.state = 326 + self.state = 322 self.match(HogQLParser.EQ_SINGLE) - self.state = 327 + self.state = 323 self.expression() except RecognitionException as re: localctx.exception = re @@ -2199,13 +2195,13 @@ def exprStmt(self): self.enterRule(localctx, 32, self.RULE_exprStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 329 + self.state = 325 self.expression() - self.state = 331 + self.state = 327 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,23,self._ctx) if la_ == 1: - self.state = 330 + self.state = 326 self.match(HogQLParser.SEMICOLON) @@ -2246,7 +2242,7 @@ def emptyStmt(self): self.enterRule(localctx, 34, self.RULE_emptyStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 
333 + self.state = 329 self.match(HogQLParser.SEMICOLON) except RecognitionException as re: localctx.exception = re @@ -2296,19 +2292,19 @@ def block(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 335 + self.state = 331 self.match(HogQLParser.LBRACE) - self.state = 339 + self.state = 335 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & -536887298) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493105500848127) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 4212759) != 0): - self.state = 336 + self.state = 332 self.declaration() - self.state = 341 + self.state = 337 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 342 + self.state = 338 self.match(HogQLParser.RBRACE) except RecognitionException as re: localctx.exception = re @@ -2354,11 +2350,11 @@ def kvPair(self): self.enterRule(localctx, 38, self.RULE_kvPair) try: self.enterOuterAlt(localctx, 1) - self.state = 344 + self.state = 340 self.expression() - self.state = 345 + self.state = 341 self.match(HogQLParser.COLON) - self.state = 346 + self.state = 342 self.expression() except RecognitionException as re: localctx.exception = re @@ -2408,26 +2404,26 @@ def kvPairList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 348 + self.state = 344 self.kvPair() - self.state = 353 + self.state = 349 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,25,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 349 + self.state = 345 self.match(HogQLParser.COMMA) - self.state = 350 + self.state = 346 self.kvPair() - self.state = 355 + self.state = 351 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,25,self._ctx) - self.state = 357 + self.state = 353 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 356 + self.state = 352 self.match(HogQLParser.COMMA) @@ -2480,26 +2476,26 @@ def select(self): self.enterRule(localctx, 42, self.RULE_select) try: self.enterOuterAlt(localctx, 1) - self.state = 362 + self.state = 358 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,27,self._ctx) if la_ == 1: - self.state = 359 + self.state = 355 self.selectUnionStmt() pass elif la_ == 2: - self.state = 360 + self.state = 356 self.selectStmt() pass elif la_ == 3: - self.state = 361 + self.state = 357 self.hogqlxTagElement() pass - self.state = 364 + self.state = 360 self.match(HogQLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -2555,19 +2551,19 @@ def selectUnionStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 366 + self.state = 362 self.selectStmtWithParens() - self.state = 372 + self.state = 368 self._errHandler.sync(self) _la = self._input.LA(1) while _la==95: - self.state = 367 + self.state = 363 self.match(HogQLParser.UNION) - self.state = 368 + self.state = 364 self.match(HogQLParser.ALL) - self.state = 369 + self.state = 365 self.selectStmtWithParens() - self.state = 374 + self.state = 370 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2622,26 +2618,26 @@ def selectStmtWithParens(self): localctx = HogQLParser.SelectStmtWithParensContext(self, self._ctx, self.state) self.enterRule(localctx, 46, self.RULE_selectStmtWithParens) try: - self.state = 381 + self.state = 377 self._errHandler.sync(self) token = self._input.LA(1) if token in [79, 102]: self.enterOuterAlt(localctx, 1) - 
self.state = 375 + self.state = 371 self.selectStmt() pass elif token in [130]: self.enterOuterAlt(localctx, 2) - self.state = 376 + self.state = 372 self.match(HogQLParser.LPAREN) - self.state = 377 + self.state = 373 self.selectUnionStmt() - self.state = 378 + self.state = 374 self.match(HogQLParser.RPAREN) pass elif token in [128]: self.enterOuterAlt(localctx, 3) - self.state = 380 + self.state = 376 self.placeholder() pass else: @@ -2763,81 +2759,81 @@ def selectStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 384 + self.state = 380 self._errHandler.sync(self) _la = self._input.LA(1) if _la==102: - self.state = 383 + self.state = 379 localctx.with_ = self.withClause() - self.state = 386 + self.state = 382 self.match(HogQLParser.SELECT) - self.state = 388 + self.state = 384 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,31,self._ctx) if la_ == 1: - self.state = 387 + self.state = 383 self.match(HogQLParser.DISTINCT) - self.state = 391 + self.state = 387 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,32,self._ctx) if la_ == 1: - self.state = 390 + self.state = 386 self.topClause() - self.state = 393 + self.state = 389 localctx.columns = self.columnExprList() - self.state = 395 + self.state = 391 self._errHandler.sync(self) _la = self._input.LA(1) if _la==34: - self.state = 394 + self.state = 390 localctx.from_ = self.fromClause() - self.state = 398 + self.state = 394 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 2269391999729696) != 0): - self.state = 397 + self.state = 393 self.arrayJoinClause() - self.state = 401 + self.state = 397 self._errHandler.sync(self) _la = self._input.LA(1) if _la==69: - self.state = 400 + self.state = 396 self.prewhereClause() - self.state = 404 + self.state = 400 self._errHandler.sync(self) _la = self._input.LA(1) if _la==99: - self.state = 403 + self.state = 399 localctx.where = self.whereClause() - self.state = 407 + self.state = 403 self._errHandler.sync(self) _la = self._input.LA(1) if _la==36: - self.state = 406 + self.state = 402 self.groupByClause() - self.state = 411 + self.state = 407 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,38,self._ctx) if la_ == 1: - self.state = 409 + self.state = 405 self.match(HogQLParser.WITH) - self.state = 410 + self.state = 406 _la = self._input.LA(1) if not(_la==18 or _la==74): self._errHandler.recoverInline(self) @@ -2846,60 +2842,60 @@ def selectStmt(self): self.consume() - self.state = 415 + self.state = 411 self._errHandler.sync(self) _la = self._input.LA(1) if _la==102: - self.state = 413 + self.state = 409 self.match(HogQLParser.WITH) - self.state = 414 + self.state = 410 self.match(HogQLParser.TOTALS) - self.state = 418 + self.state = 414 self._errHandler.sync(self) _la = self._input.LA(1) if _la==37: - self.state = 417 + self.state = 413 self.havingClause() - self.state = 421 + self.state = 417 self._errHandler.sync(self) _la = self._input.LA(1) if _la==101: - self.state = 420 + self.state = 416 self.windowClause() - self.state = 424 + self.state = 420 self._errHandler.sync(self) _la = self._input.LA(1) if _la==64: - self.state = 423 + self.state = 419 self.orderByClause() - self.state = 428 + self.state = 424 self._errHandler.sync(self) token = self._input.LA(1) if token in [54]: - self.state = 426 + self.state = 422 self.limitAndOffsetClause() pass elif token in [61]: - self.state = 427 + self.state = 423 self.offsetOnlyClause() pass 
elif token in [-1, 81, 95, 149]: pass else: pass - self.state = 431 + self.state = 427 self._errHandler.sync(self) _la = self._input.LA(1) if _la==81: - self.state = 430 + self.state = 426 self.settingsClause() @@ -2944,9 +2940,9 @@ def withClause(self): self.enterRule(localctx, 50, self.RULE_withClause) try: self.enterOuterAlt(localctx, 1) - self.state = 433 + self.state = 429 self.match(HogQLParser.WITH) - self.state = 434 + self.state = 430 self.withExprList() except RecognitionException as re: localctx.exception = re @@ -2994,17 +2990,17 @@ def topClause(self): self.enterRule(localctx, 52, self.RULE_topClause) try: self.enterOuterAlt(localctx, 1) - self.state = 436 + self.state = 432 self.match(HogQLParser.TOP) - self.state = 437 + self.state = 433 self.match(HogQLParser.DECIMAL_LITERAL) - self.state = 440 + self.state = 436 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,45,self._ctx) if la_ == 1: - self.state = 438 + self.state = 434 self.match(HogQLParser.WITH) - self.state = 439 + self.state = 435 self.match(HogQLParser.TIES) @@ -3049,9 +3045,9 @@ def fromClause(self): self.enterRule(localctx, 54, self.RULE_fromClause) try: self.enterOuterAlt(localctx, 1) - self.state = 442 + self.state = 438 self.match(HogQLParser.FROM) - self.state = 443 + self.state = 439 self.joinExpr(0) except RecognitionException as re: localctx.exception = re @@ -3104,11 +3100,11 @@ def arrayJoinClause(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 446 + self.state = 442 self._errHandler.sync(self) _la = self._input.LA(1) if _la==44 or _la==51: - self.state = 445 + self.state = 441 _la = self._input.LA(1) if not(_la==44 or _la==51): self._errHandler.recoverInline(self) @@ -3117,11 +3113,11 @@ def arrayJoinClause(self): self.consume() - self.state = 448 + self.state = 444 self.match(HogQLParser.ARRAY) - self.state = 449 + self.state = 445 self.match(HogQLParser.JOIN) - self.state = 450 + self.state = 446 self.columnExprList() except RecognitionException as re: localctx.exception = re @@ -3199,35 +3195,35 @@ def windowClause(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 452 + self.state = 448 self.match(HogQLParser.WINDOW) - self.state = 453 + self.state = 449 self.identifier() - self.state = 454 + self.state = 450 self.match(HogQLParser.AS) - self.state = 455 + self.state = 451 self.match(HogQLParser.LPAREN) - self.state = 456 + self.state = 452 self.windowExpr() - self.state = 457 + self.state = 453 self.match(HogQLParser.RPAREN) - self.state = 467 + self.state = 463 self._errHandler.sync(self) _la = self._input.LA(1) while _la==116: - self.state = 458 + self.state = 454 self.match(HogQLParser.COMMA) - self.state = 459 + self.state = 455 self.identifier() - self.state = 460 + self.state = 456 self.match(HogQLParser.AS) - self.state = 461 + self.state = 457 self.match(HogQLParser.LPAREN) - self.state = 462 + self.state = 458 self.windowExpr() - self.state = 463 + self.state = 459 self.match(HogQLParser.RPAREN) - self.state = 469 + self.state = 465 self._errHandler.sync(self) _la = self._input.LA(1) @@ -3272,9 +3268,9 @@ def prewhereClause(self): self.enterRule(localctx, 60, self.RULE_prewhereClause) try: self.enterOuterAlt(localctx, 1) - self.state = 470 + self.state = 466 self.match(HogQLParser.PREWHERE) - self.state = 471 + self.state = 467 self.columnExpr(0) except RecognitionException as re: localctx.exception = re @@ -3317,9 +3313,9 @@ def whereClause(self): self.enterRule(localctx, 62, 
self.RULE_whereClause) try: self.enterOuterAlt(localctx, 1) - self.state = 473 + self.state = 469 self.match(HogQLParser.WHERE) - self.state = 474 + self.state = 470 self.columnExpr(0) except RecognitionException as re: localctx.exception = re @@ -3378,31 +3374,31 @@ def groupByClause(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 476 + self.state = 472 self.match(HogQLParser.GROUP) - self.state = 477 + self.state = 473 self.match(HogQLParser.BY) - self.state = 484 + self.state = 480 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,48,self._ctx) if la_ == 1: - self.state = 478 + self.state = 474 _la = self._input.LA(1) if not(_la==18 or _la==74): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 479 + self.state = 475 self.match(HogQLParser.LPAREN) - self.state = 480 + self.state = 476 self.columnExprList() - self.state = 481 + self.state = 477 self.match(HogQLParser.RPAREN) pass elif la_ == 2: - self.state = 483 + self.state = 479 self.columnExprList() pass @@ -3448,9 +3444,9 @@ def havingClause(self): self.enterRule(localctx, 66, self.RULE_havingClause) try: self.enterOuterAlt(localctx, 1) - self.state = 486 + self.state = 482 self.match(HogQLParser.HAVING) - self.state = 487 + self.state = 483 self.columnExpr(0) except RecognitionException as re: localctx.exception = re @@ -3496,11 +3492,11 @@ def orderByClause(self): self.enterRule(localctx, 68, self.RULE_orderByClause) try: self.enterOuterAlt(localctx, 1) - self.state = 489 + self.state = 485 self.match(HogQLParser.ORDER) - self.state = 490 + self.state = 486 self.match(HogQLParser.BY) - self.state = 491 + self.state = 487 self.orderExprList() except RecognitionException as re: localctx.exception = re @@ -3546,11 +3542,11 @@ def projectionOrderByClause(self): self.enterRule(localctx, 70, self.RULE_projectionOrderByClause) try: self.enterOuterAlt(localctx, 1) - self.state = 493 + self.state = 489 self.match(HogQLParser.ORDER) - self.state = 494 + self.state = 490 self.match(HogQLParser.BY) - self.state = 495 + self.state = 491 self.columnExprList() except RecognitionException as re: localctx.exception = re @@ -3615,38 +3611,38 @@ def limitAndOffsetClause(self): self.enterRule(localctx, 72, self.RULE_limitAndOffsetClause) self._la = 0 # Token type try: - self.state = 526 + self.state = 522 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,53,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 497 + self.state = 493 self.match(HogQLParser.LIMIT) - self.state = 498 + self.state = 494 self.columnExpr(0) - self.state = 501 + self.state = 497 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 499 + self.state = 495 self.match(HogQLParser.COMMA) - self.state = 500 + self.state = 496 self.columnExpr(0) - self.state = 507 + self.state = 503 self._errHandler.sync(self) token = self._input.LA(1) if token in [102]: - self.state = 503 + self.state = 499 self.match(HogQLParser.WITH) - self.state = 504 + self.state = 500 self.match(HogQLParser.TIES) pass elif token in [11]: - self.state = 505 + self.state = 501 self.match(HogQLParser.BY) - self.state = 506 + self.state = 502 self.columnExprList() pass elif token in [-1, 81, 95, 149]: @@ -3657,43 +3653,43 @@ def limitAndOffsetClause(self): elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 509 + self.state = 505 self.match(HogQLParser.LIMIT) - self.state = 510 + self.state = 506 self.columnExpr(0) - 
self.state = 513 + self.state = 509 self._errHandler.sync(self) _la = self._input.LA(1) if _la==102: - self.state = 511 + self.state = 507 self.match(HogQLParser.WITH) - self.state = 512 + self.state = 508 self.match(HogQLParser.TIES) - self.state = 515 + self.state = 511 self.match(HogQLParser.OFFSET) - self.state = 516 + self.state = 512 self.columnExpr(0) pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 518 + self.state = 514 self.match(HogQLParser.LIMIT) - self.state = 519 + self.state = 515 self.columnExpr(0) - self.state = 520 + self.state = 516 self.match(HogQLParser.OFFSET) - self.state = 521 + self.state = 517 self.columnExpr(0) - self.state = 524 + self.state = 520 self._errHandler.sync(self) _la = self._input.LA(1) if _la==11: - self.state = 522 + self.state = 518 self.match(HogQLParser.BY) - self.state = 523 + self.state = 519 self.columnExprList() @@ -3741,9 +3737,9 @@ def offsetOnlyClause(self): self.enterRule(localctx, 74, self.RULE_offsetOnlyClause) try: self.enterOuterAlt(localctx, 1) - self.state = 528 + self.state = 524 self.match(HogQLParser.OFFSET) - self.state = 529 + self.state = 525 self.columnExpr(0) except RecognitionException as re: localctx.exception = re @@ -3786,9 +3782,9 @@ def settingsClause(self): self.enterRule(localctx, 76, self.RULE_settingsClause) try: self.enterOuterAlt(localctx, 1) - self.state = 531 + self.state = 527 self.match(HogQLParser.SETTINGS) - self.state = 532 + self.state = 528 self.settingExprList() except RecognitionException as re: localctx.exception = re @@ -3920,7 +3916,7 @@ def joinExpr(self, _p:int=0): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 546 + self.state = 542 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,56,self._ctx) if la_ == 1: @@ -3928,21 +3924,21 @@ def joinExpr(self, _p:int=0): self._ctx = localctx _prevctx = localctx - self.state = 535 + self.state = 531 self.tableExpr(0) - self.state = 537 + self.state = 533 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,54,self._ctx) if la_ == 1: - self.state = 536 + self.state = 532 self.match(HogQLParser.FINAL) - self.state = 540 + self.state = 536 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,55,self._ctx) if la_ == 1: - self.state = 539 + self.state = 535 self.sampleClause() @@ -3952,17 +3948,17 @@ def joinExpr(self, _p:int=0): localctx = HogQLParser.JoinExprParensContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 542 + self.state = 538 self.match(HogQLParser.LPAREN) - self.state = 543 + self.state = 539 self.joinExpr(0) - self.state = 544 + self.state = 540 self.match(HogQLParser.RPAREN) pass self._ctx.stop = self._input.LT(-1) - self.state = 562 + self.state = 558 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,59,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -3970,47 +3966,47 @@ def joinExpr(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 560 + self.state = 556 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,58,self._ctx) if la_ == 1: localctx = HogQLParser.JoinExprCrossOpContext(self, HogQLParser.JoinExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_joinExpr) - self.state = 548 + self.state = 544 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise 
FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 549 + self.state = 545 self.joinOpCross() - self.state = 550 + self.state = 546 self.joinExpr(4) pass elif la_ == 2: localctx = HogQLParser.JoinExprOpContext(self, HogQLParser.JoinExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_joinExpr) - self.state = 552 + self.state = 548 if not self.precpred(self._ctx, 4): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 4)") - self.state = 554 + self.state = 550 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 2269426359468314) != 0) or _la==73 or _la==80: - self.state = 553 + self.state = 549 self.joinOp() - self.state = 556 + self.state = 552 self.match(HogQLParser.JOIN) - self.state = 557 + self.state = 553 self.joinExpr(0) - self.state = 558 + self.state = 554 self.joinConstraintClause() pass - self.state = 564 + self.state = 560 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,59,self._ctx) @@ -4121,21 +4117,21 @@ def joinOp(self): self.enterRule(localctx, 80, self.RULE_joinOp) self._la = 0 # Token type try: - self.state = 608 + self.state = 604 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,73,self._ctx) if la_ == 1: localctx = HogQLParser.JoinOpInnerContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 574 + self.state = 570 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,62,self._ctx) if la_ == 1: - self.state = 566 + self.state = 562 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 274) != 0): - self.state = 565 + self.state = 561 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 274) != 0)): self._errHandler.recoverInline(self) @@ -4144,18 +4140,18 @@ def joinOp(self): self.consume() - self.state = 568 + self.state = 564 self.match(HogQLParser.INNER) pass elif la_ == 2: - self.state = 569 + self.state = 565 self.match(HogQLParser.INNER) - self.state = 571 + self.state = 567 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 274) != 0): - self.state = 570 + self.state = 566 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 274) != 0)): self._errHandler.recoverInline(self) @@ -4167,7 +4163,7 @@ def joinOp(self): pass elif la_ == 3: - self.state = 573 + self.state = 569 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 274) != 0)): self._errHandler.recoverInline(self) @@ -4182,15 +4178,15 @@ def joinOp(self): elif la_ == 2: localctx = HogQLParser.JoinOpLeftRightContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 590 + self.state = 586 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,67,self._ctx) if la_ == 1: - self.state = 577 + self.state = 573 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 282) != 0) or _la==80: - self.state = 576 + self.state = 572 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 282) != 0) or _la==80): self._errHandler.recoverInline(self) @@ -4199,44 +4195,44 @@ def joinOp(self): self.consume() - self.state = 579 + self.state = 575 _la = self._input.LA(1) if not(_la==51 or _la==73): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 581 + self.state = 577 
self._errHandler.sync(self) _la = self._input.LA(1) if _la==65: - self.state = 580 + self.state = 576 self.match(HogQLParser.OUTER) pass elif la_ == 2: - self.state = 583 + self.state = 579 _la = self._input.LA(1) if not(_la==51 or _la==73): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 585 + self.state = 581 self._errHandler.sync(self) _la = self._input.LA(1) if _la==65: - self.state = 584 + self.state = 580 self.match(HogQLParser.OUTER) - self.state = 588 + self.state = 584 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 282) != 0) or _la==80: - self.state = 587 + self.state = 583 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 282) != 0) or _la==80): self._errHandler.recoverInline(self) @@ -4253,15 +4249,15 @@ def joinOp(self): elif la_ == 3: localctx = HogQLParser.JoinOpFullContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 606 + self.state = 602 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,72,self._ctx) if la_ == 1: - self.state = 593 + self.state = 589 self._errHandler.sync(self) _la = self._input.LA(1) if _la==1 or _la==4: - self.state = 592 + self.state = 588 _la = self._input.LA(1) if not(_la==1 or _la==4): self._errHandler.recoverInline(self) @@ -4270,34 +4266,34 @@ def joinOp(self): self.consume() - self.state = 595 + self.state = 591 self.match(HogQLParser.FULL) - self.state = 597 + self.state = 593 self._errHandler.sync(self) _la = self._input.LA(1) if _la==65: - self.state = 596 + self.state = 592 self.match(HogQLParser.OUTER) pass elif la_ == 2: - self.state = 599 + self.state = 595 self.match(HogQLParser.FULL) - self.state = 601 + self.state = 597 self._errHandler.sync(self) _la = self._input.LA(1) if _la==65: - self.state = 600 + self.state = 596 self.match(HogQLParser.OUTER) - self.state = 604 + self.state = 600 self._errHandler.sync(self) _la = self._input.LA(1) if _la==1 or _la==4: - self.state = 603 + self.state = 599 _la = self._input.LA(1) if not(_la==1 or _la==4): self._errHandler.recoverInline(self) @@ -4354,19 +4350,19 @@ def joinOpCross(self): localctx = HogQLParser.JoinOpCrossContext(self, self._ctx, self.state) self.enterRule(localctx, 82, self.RULE_joinOpCross) try: - self.state = 613 + self.state = 609 self._errHandler.sync(self) token = self._input.LA(1) if token in [17]: self.enterOuterAlt(localctx, 1) - self.state = 610 + self.state = 606 self.match(HogQLParser.CROSS) - self.state = 611 + self.state = 607 self.match(HogQLParser.JOIN) pass elif token in [116]: self.enterOuterAlt(localctx, 2) - self.state = 612 + self.state = 608 self.match(HogQLParser.COMMA) pass else: @@ -4421,34 +4417,34 @@ def joinConstraintClause(self): localctx = HogQLParser.JoinConstraintClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 84, self.RULE_joinConstraintClause) try: - self.state = 624 + self.state = 620 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,75,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 615 + self.state = 611 self.match(HogQLParser.ON) - self.state = 616 + self.state = 612 self.columnExprList() pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 617 + self.state = 613 self.match(HogQLParser.USING) - self.state = 618 + self.state = 614 self.match(HogQLParser.LPAREN) - self.state = 619 + self.state = 615 self.columnExprList() - self.state = 620 + self.state = 616 self.match(HogQLParser.RPAREN) pass 
elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 622 + self.state = 618 self.match(HogQLParser.USING) - self.state = 623 + self.state = 619 self.columnExprList() pass @@ -4500,17 +4496,17 @@ def sampleClause(self): self.enterRule(localctx, 86, self.RULE_sampleClause) try: self.enterOuterAlt(localctx, 1) - self.state = 626 + self.state = 622 self.match(HogQLParser.SAMPLE) - self.state = 627 + self.state = 623 self.ratioExpr() - self.state = 630 + self.state = 626 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,76,self._ctx) if la_ == 1: - self.state = 628 + self.state = 624 self.match(HogQLParser.OFFSET) - self.state = 629 + self.state = 625 self.ratioExpr() @@ -4562,17 +4558,17 @@ def orderExprList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 632 + self.state = 628 self.orderExpr() - self.state = 637 + self.state = 633 self._errHandler.sync(self) _la = self._input.LA(1) while _la==116: - self.state = 633 + self.state = 629 self.match(HogQLParser.COMMA) - self.state = 634 + self.state = 630 self.orderExpr() - self.state = 639 + self.state = 635 self._errHandler.sync(self) _la = self._input.LA(1) @@ -4639,13 +4635,13 @@ def orderExpr(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 640 + self.state = 636 self.columnExpr(0) - self.state = 642 + self.state = 638 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 12583040) != 0): - self.state = 641 + self.state = 637 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 12583040) != 0)): self._errHandler.recoverInline(self) @@ -4654,13 +4650,13 @@ def orderExpr(self): self.consume() - self.state = 646 + self.state = 642 self._errHandler.sync(self) _la = self._input.LA(1) if _la==60: - self.state = 644 + self.state = 640 self.match(HogQLParser.NULLS) - self.state = 645 + self.state = 641 _la = self._input.LA(1) if not(_la==30 or _la==49): self._errHandler.recoverInline(self) @@ -4669,13 +4665,13 @@ def orderExpr(self): self.consume() - self.state = 650 + self.state = 646 self._errHandler.sync(self) _la = self._input.LA(1) if _la==16: - self.state = 648 + self.state = 644 self.match(HogQLParser.COLLATE) - self.state = 649 + self.state = 645 self.match(HogQLParser.STRING_LITERAL) @@ -4726,25 +4722,25 @@ def ratioExpr(self): localctx = HogQLParser.RatioExprContext(self, self._ctx, self.state) self.enterRule(localctx, 92, self.RULE_ratioExpr) try: - self.state = 658 + self.state = 654 self._errHandler.sync(self) token = self._input.LA(1) if token in [128]: self.enterOuterAlt(localctx, 1) - self.state = 652 + self.state = 648 self.placeholder() pass elif token in [43, 57, 106, 107, 108, 109, 118, 120, 139]: self.enterOuterAlt(localctx, 2) - self.state = 653 + self.state = 649 self.numberLiteral() - self.state = 656 + self.state = 652 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,81,self._ctx) if la_ == 1: - self.state = 654 + self.state = 650 self.match(HogQLParser.SLASH) - self.state = 655 + self.state = 651 self.numberLiteral() @@ -4800,17 +4796,17 @@ def settingExprList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 660 + self.state = 656 self.settingExpr() - self.state = 665 + self.state = 661 self._errHandler.sync(self) _la = self._input.LA(1) while _la==116: - self.state = 661 + self.state = 657 self.match(HogQLParser.COMMA) - self.state = 662 + self.state = 658 self.settingExpr() - self.state = 667 + 
self.state = 663 self._errHandler.sync(self) _la = self._input.LA(1) @@ -4859,11 +4855,11 @@ def settingExpr(self): self.enterRule(localctx, 96, self.RULE_settingExpr) try: self.enterOuterAlt(localctx, 1) - self.state = 668 + self.state = 664 self.identifier() - self.state = 669 + self.state = 665 self.match(HogQLParser.EQ_SINGLE) - self.state = 670 + self.state = 666 self.literal() except RecognitionException as re: localctx.exception = re @@ -4912,27 +4908,27 @@ def windowExpr(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 673 + self.state = 669 self._errHandler.sync(self) _la = self._input.LA(1) if _la==67: - self.state = 672 + self.state = 668 self.winPartitionByClause() - self.state = 676 + self.state = 672 self._errHandler.sync(self) _la = self._input.LA(1) if _la==64: - self.state = 675 + self.state = 671 self.winOrderByClause() - self.state = 679 + self.state = 675 self._errHandler.sync(self) _la = self._input.LA(1) if _la==71 or _la==76: - self.state = 678 + self.state = 674 self.winFrameClause() @@ -4980,11 +4976,11 @@ def winPartitionByClause(self): self.enterRule(localctx, 100, self.RULE_winPartitionByClause) try: self.enterOuterAlt(localctx, 1) - self.state = 681 + self.state = 677 self.match(HogQLParser.PARTITION) - self.state = 682 + self.state = 678 self.match(HogQLParser.BY) - self.state = 683 + self.state = 679 self.columnExprList() except RecognitionException as re: localctx.exception = re @@ -5030,11 +5026,11 @@ def winOrderByClause(self): self.enterRule(localctx, 102, self.RULE_winOrderByClause) try: self.enterOuterAlt(localctx, 1) - self.state = 685 + self.state = 681 self.match(HogQLParser.ORDER) - self.state = 686 + self.state = 682 self.match(HogQLParser.BY) - self.state = 687 + self.state = 683 self.orderExprList() except RecognitionException as re: localctx.exception = re @@ -5081,14 +5077,14 @@ def winFrameClause(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 689 + self.state = 685 _la = self._input.LA(1) if not(_la==71 or _la==76): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 690 + self.state = 686 self.winFrameExtend() except RecognitionException as re: localctx.exception = re @@ -5163,25 +5159,25 @@ def winFrameExtend(self): localctx = HogQLParser.WinFrameExtendContext(self, self._ctx, self.state) self.enterRule(localctx, 106, self.RULE_winFrameExtend) try: - self.state = 698 + self.state = 694 self._errHandler.sync(self) token = self._input.LA(1) if token in [19, 43, 57, 94, 106, 107, 108, 109, 118, 120, 139]: localctx = HogQLParser.FrameStartContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 692 + self.state = 688 self.winFrameBound() pass elif token in [9]: localctx = HogQLParser.FrameBetweenContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 693 + self.state = 689 self.match(HogQLParser.BETWEEN) - self.state = 694 + self.state = 690 self.winFrameBound() - self.state = 695 + self.state = 691 self.match(HogQLParser.AND) - self.state = 696 + self.state = 692 self.winFrameBound() pass else: @@ -5240,41 +5236,41 @@ def winFrameBound(self): self.enterRule(localctx, 108, self.RULE_winFrameBound) try: self.enterOuterAlt(localctx, 1) - self.state = 712 + self.state = 708 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,88,self._ctx) if la_ == 1: - self.state = 700 + self.state = 696 self.match(HogQLParser.CURRENT) - self.state = 701 + self.state = 697 
self.match(HogQLParser.ROW) pass elif la_ == 2: - self.state = 702 + self.state = 698 self.match(HogQLParser.UNBOUNDED) - self.state = 703 + self.state = 699 self.match(HogQLParser.PRECEDING) pass elif la_ == 3: - self.state = 704 + self.state = 700 self.match(HogQLParser.UNBOUNDED) - self.state = 705 + self.state = 701 self.match(HogQLParser.FOLLOWING) pass elif la_ == 4: - self.state = 706 + self.state = 702 self.numberLiteral() - self.state = 707 + self.state = 703 self.match(HogQLParser.PRECEDING) pass elif la_ == 5: - self.state = 709 + self.state = 705 self.numberLiteral() - self.state = 710 + self.state = 706 self.match(HogQLParser.FOLLOWING) pass @@ -5320,9 +5316,9 @@ def expr(self): self.enterRule(localctx, 110, self.RULE_expr) try: self.enterOuterAlt(localctx, 1) - self.state = 714 + self.state = 710 self.columnExpr(0) - self.state = 715 + self.state = 711 self.match(HogQLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -5497,138 +5493,138 @@ def columnTypeExpr(self): self.enterRule(localctx, 112, self.RULE_columnTypeExpr) self._la = 0 # Token type try: - self.state = 773 + self.state = 769 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,96,self._ctx) if la_ == 1: localctx = HogQLParser.ColumnTypeExprSimpleContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 717 + self.state = 713 self.identifier() pass elif la_ == 2: localctx = HogQLParser.ColumnTypeExprNestedContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 718 + self.state = 714 self.identifier() - self.state = 719 + self.state = 715 self.match(HogQLParser.LPAREN) - self.state = 720 + self.state = 716 self.identifier() - self.state = 721 + self.state = 717 self.columnTypeExpr() - self.state = 728 + self.state = 724 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,89,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 722 + self.state = 718 self.match(HogQLParser.COMMA) - self.state = 723 + self.state = 719 self.identifier() - self.state = 724 + self.state = 720 self.columnTypeExpr() - self.state = 730 + self.state = 726 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,89,self._ctx) - self.state = 732 + self.state = 728 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 731 + self.state = 727 self.match(HogQLParser.COMMA) - self.state = 734 + self.state = 730 self.match(HogQLParser.RPAREN) pass elif la_ == 3: localctx = HogQLParser.ColumnTypeExprEnumContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 736 + self.state = 732 self.identifier() - self.state = 737 + self.state = 733 self.match(HogQLParser.LPAREN) - self.state = 738 + self.state = 734 self.enumValue() - self.state = 743 + self.state = 739 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,91,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 739 + self.state = 735 self.match(HogQLParser.COMMA) - self.state = 740 + self.state = 736 self.enumValue() - self.state = 745 + self.state = 741 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,91,self._ctx) - self.state = 747 + self.state = 743 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 746 + self.state = 742 self.match(HogQLParser.COMMA) - self.state = 749 + self.state = 745 self.match(HogQLParser.RPAREN) pass elif la_ == 4: localctx = HogQLParser.ColumnTypeExprComplexContext(self, 
localctx) self.enterOuterAlt(localctx, 4) - self.state = 751 + self.state = 747 self.identifier() - self.state = 752 + self.state = 748 self.match(HogQLParser.LPAREN) - self.state = 753 + self.state = 749 self.columnTypeExpr() - self.state = 758 + self.state = 754 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,93,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 754 + self.state = 750 self.match(HogQLParser.COMMA) - self.state = 755 + self.state = 751 self.columnTypeExpr() - self.state = 760 + self.state = 756 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,93,self._ctx) - self.state = 762 + self.state = 758 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 761 + self.state = 757 self.match(HogQLParser.COMMA) - self.state = 764 + self.state = 760 self.match(HogQLParser.RPAREN) pass elif la_ == 5: localctx = HogQLParser.ColumnTypeExprParamContext(self, localctx) self.enterOuterAlt(localctx, 5) - self.state = 766 + self.state = 762 self.identifier() - self.state = 767 + self.state = 763 self.match(HogQLParser.LPAREN) - self.state = 769 + self.state = 765 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 768 + self.state = 764 self.columnExprList() - self.state = 771 + self.state = 767 self.match(HogQLParser.RPAREN) pass @@ -5680,26 +5676,26 @@ def columnExprList(self): self.enterRule(localctx, 114, self.RULE_columnExprList) try: self.enterOuterAlt(localctx, 1) - self.state = 775 + self.state = 771 self.columnExpr(0) - self.state = 780 + self.state = 776 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,97,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 776 + self.state = 772 self.match(HogQLParser.COMMA) - self.state = 777 + self.state = 773 self.columnExpr(0) - self.state = 782 + self.state = 778 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,97,self._ctx) - self.state = 784 + self.state = 780 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,98,self._ctx) if la_ == 1: - self.state = 783 + self.state = 779 self.match(HogQLParser.COMMA) @@ -6142,6 +6138,8 @@ class ColumnExprWinFunctionTargetContext(ColumnExprContext): def __init__(self, parser, ctx:ParserRuleContext): # actually a HogQLParser.ColumnExprContext super().__init__(parser) + self.columnExprs = None # ColumnExprListContext + self.columnArgList = None # ColumnExprListContext self.copyFrom(ctx) def identifier(self, i:int=None): @@ -6162,14 +6160,14 @@ def RPAREN(self, i:int=None): return self.getTokens(HogQLParser.RPAREN) else: return self.getToken(HogQLParser.RPAREN, i) - def columnExprList(self): - return self.getTypedRuleContext(HogQLParser.ColumnExprListContext,0) + def columnExprList(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(HogQLParser.ColumnExprListContext) + else: + return self.getTypedRuleContext(HogQLParser.ColumnExprListContext,i) def DISTINCT(self): return self.getToken(HogQLParser.DISTINCT, 0) - def columnArgList(self): - return self.getTypedRuleContext(HogQLParser.ColumnArgListContext,0) - def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitColumnExprWinFunctionTarget" ): @@ -6289,6 +6287,30 @@ def 
accept(self, visitor:ParseTreeVisitor): return visitor.visitChildren(self) + class ColumnExprCallContext(ColumnExprContext): + + def __init__(self, parser, ctx:ParserRuleContext): # actually a HogQLParser.ColumnExprContext + super().__init__(parser) + self.copyFrom(ctx) + + def columnExpr(self): + return self.getTypedRuleContext(HogQLParser.ColumnExprContext,0) + + def LPAREN(self): + return self.getToken(HogQLParser.LPAREN, 0) + def RPAREN(self): + return self.getToken(HogQLParser.RPAREN, 0) + def columnExprList(self): + return self.getTypedRuleContext(HogQLParser.ColumnExprListContext,0) + + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitColumnExprCall" ): + return visitor.visitColumnExprCall(self) + else: + return visitor.visitChildren(self) + + class ColumnExprArrayAccessContext(ColumnExprContext): def __init__(self, parser, ctx:ParserRuleContext): # actually a HogQLParser.ColumnExprContext @@ -6572,6 +6594,8 @@ class ColumnExprWinFunctionContext(ColumnExprContext): def __init__(self, parser, ctx:ParserRuleContext): # actually a HogQLParser.ColumnExprContext super().__init__(parser) + self.columnExprs = None # ColumnExprListContext + self.columnArgList = None # ColumnExprListContext self.copyFrom(ctx) def identifier(self): @@ -6592,14 +6616,14 @@ def RPAREN(self, i:int=None): return self.getTokens(HogQLParser.RPAREN) else: return self.getToken(HogQLParser.RPAREN, i) - def columnExprList(self): - return self.getTypedRuleContext(HogQLParser.ColumnExprListContext,0) + def columnExprList(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(HogQLParser.ColumnExprListContext) + else: + return self.getTypedRuleContext(HogQLParser.ColumnExprListContext,i) def DISTINCT(self): return self.getToken(HogQLParser.DISTINCT, 0) - def columnArgList(self): - return self.getTypedRuleContext(HogQLParser.ColumnArgListContext,0) - def accept(self, visitor:ParseTreeVisitor): if hasattr( visitor, "visitColumnExprWinFunction" ): @@ -6608,6 +6632,23 @@ def accept(self, visitor:ParseTreeVisitor): return visitor.visitChildren(self) + class ColumnExprLambdaContext(ColumnExprContext): + + def __init__(self, parser, ctx:ParserRuleContext): # actually a HogQLParser.ColumnExprContext + super().__init__(parser) + self.copyFrom(ctx) + + def columnLambdaExpr(self): + return self.getTypedRuleContext(HogQLParser.ColumnLambdaExprContext,0) + + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitColumnExprLambda" ): + return visitor.visitColumnExprLambda(self) + else: + return visitor.visitChildren(self) + + class ColumnExprIdentifierContext(ColumnExprContext): def __init__(self, parser, ctx:ParserRuleContext): # actually a HogQLParser.ColumnExprContext @@ -6629,6 +6670,8 @@ class ColumnExprFunctionContext(ColumnExprContext): def __init__(self, parser, ctx:ParserRuleContext): # actually a HogQLParser.ColumnExprContext super().__init__(parser) + self.columnExprs = None # ColumnExprListContext + self.columnArgList = None # ColumnExprListContext self.copyFrom(ctx) def identifier(self): @@ -6646,11 +6689,11 @@ def RPAREN(self, i:int=None): return self.getToken(HogQLParser.RPAREN, i) def DISTINCT(self): return self.getToken(HogQLParser.DISTINCT, 0) - def columnArgList(self): - return self.getTypedRuleContext(HogQLParser.ColumnArgListContext,0) - - def columnExprList(self): - return self.getTypedRuleContext(HogQLParser.ColumnExprListContext,0) + def columnExprList(self, i:int=None): + if i is None: + return 
self.getTypedRuleContexts(HogQLParser.ColumnExprListContext) + else: + return self.getTypedRuleContext(HogQLParser.ColumnExprListContext,i) def accept(self, visitor:ParseTreeVisitor): @@ -6692,7 +6735,7 @@ def columnExpr(self, _p:int=0): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 935 + self.state = 932 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,118,self._ctx) if la_ == 1: @@ -6700,45 +6743,45 @@ def columnExpr(self, _p:int=0): self._ctx = localctx _prevctx = localctx - self.state = 787 + self.state = 783 self.match(HogQLParser.CASE) - self.state = 789 + self.state = 785 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,99,self._ctx) if la_ == 1: - self.state = 788 + self.state = 784 localctx.caseExpr = self.columnExpr(0) - self.state = 796 + self.state = 792 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 791 + self.state = 787 self.match(HogQLParser.WHEN) - self.state = 792 + self.state = 788 localctx.whenExpr = self.columnExpr(0) - self.state = 793 + self.state = 789 self.match(HogQLParser.THEN) - self.state = 794 + self.state = 790 localctx.thenExpr = self.columnExpr(0) - self.state = 798 + self.state = 794 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==98): break - self.state = 802 + self.state = 798 self._errHandler.sync(self) _la = self._input.LA(1) if _la==25: - self.state = 800 + self.state = 796 self.match(HogQLParser.ELSE) - self.state = 801 + self.state = 797 localctx.elseExpr = self.columnExpr(0) - self.state = 804 + self.state = 800 self.match(HogQLParser.END) pass @@ -6746,17 +6789,17 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprCastContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 806 + self.state = 802 self.match(HogQLParser.CAST) - self.state = 807 + self.state = 803 self.match(HogQLParser.LPAREN) - self.state = 808 + self.state = 804 self.columnExpr(0) - self.state = 809 + self.state = 805 self.match(HogQLParser.AS) - self.state = 810 + self.state = 806 self.columnTypeExpr() - self.state = 811 + self.state = 807 self.match(HogQLParser.RPAREN) pass @@ -6764,9 +6807,9 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprDateContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 813 + self.state = 809 self.match(HogQLParser.DATE) - self.state = 814 + self.state = 810 self.match(HogQLParser.STRING_LITERAL) pass @@ -6774,11 +6817,11 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprIntervalContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 815 + self.state = 811 self.match(HogQLParser.INTERVAL) - self.state = 816 + self.state = 812 self.columnExpr(0) - self.state = 817 + self.state = 813 self.interval() pass @@ -6786,27 +6829,27 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprSubstringContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 819 + self.state = 815 self.match(HogQLParser.SUBSTRING) - self.state = 820 + self.state = 816 self.match(HogQLParser.LPAREN) - self.state = 821 + self.state = 817 self.columnExpr(0) - self.state = 822 + self.state = 818 self.match(HogQLParser.FROM) - self.state = 823 + self.state = 819 self.columnExpr(0) - self.state = 826 + self.state = 822 self._errHandler.sync(self) _la = self._input.LA(1) if _la==33: - self.state = 824 + self.state = 820 self.match(HogQLParser.FOR) - self.state = 825 + self.state = 821 
self.columnExpr(0) - self.state = 828 + self.state = 824 self.match(HogQLParser.RPAREN) pass @@ -6814,9 +6857,9 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprTimestampContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 830 + self.state = 826 self.match(HogQLParser.TIMESTAMP) - self.state = 831 + self.state = 827 self.match(HogQLParser.STRING_LITERAL) pass @@ -6824,24 +6867,24 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprTrimContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 832 + self.state = 828 self.match(HogQLParser.TRIM) - self.state = 833 + self.state = 829 self.match(HogQLParser.LPAREN) - self.state = 834 + self.state = 830 _la = self._input.LA(1) if not(_la==10 or _la==50 or _la==90): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 835 + self.state = 831 self.string() - self.state = 836 + self.state = 832 self.match(HogQLParser.FROM) - self.state = 837 + self.state = 833 self.columnExpr(0) - self.state = 838 + self.state = 834 self.match(HogQLParser.RPAREN) pass @@ -6849,54 +6892,54 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprWinFunctionContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 840 + self.state = 836 self.identifier() - self.state = 841 + self.state = 837 self.match(HogQLParser.LPAREN) - self.state = 843 + self.state = 839 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 842 - self.columnExprList() + self.state = 838 + localctx.columnExprs = self.columnExprList() - self.state = 845 + self.state = 841 self.match(HogQLParser.RPAREN) - self.state = 855 + self.state = 851 self._errHandler.sync(self) _la = self._input.LA(1) if _la==130: - self.state = 847 + self.state = 843 self.match(HogQLParser.LPAREN) - self.state = 849 + self.state = 845 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,104,self._ctx) if la_ == 1: - self.state = 848 + self.state = 844 self.match(HogQLParser.DISTINCT) - self.state = 852 + self.state = 848 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 851 - self.columnArgList() + self.state = 847 + localctx.columnArgList = self.columnExprList() - self.state = 854 + self.state = 850 self.match(HogQLParser.RPAREN) - self.state = 857 + self.state = 853 self.match(HogQLParser.OVER) - self.state = 858 + self.state = 854 self.match(HogQLParser.LPAREN) - self.state = 859 + self.state = 855 self.windowExpr() - self.state = 860 + self.state = 856 self.match(HogQLParser.RPAREN) pass @@ -6904,50 +6947,50 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprWinFunctionTargetContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 862 + self.state = 858 self.identifier() - self.state = 863 + self.state = 859 self.match(HogQLParser.LPAREN) - self.state = 865 + self.state = 861 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 
and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 864 - self.columnExprList() + self.state = 860 + localctx.columnExprs = self.columnExprList() - self.state = 867 + self.state = 863 self.match(HogQLParser.RPAREN) - self.state = 877 + self.state = 873 self._errHandler.sync(self) _la = self._input.LA(1) if _la==130: - self.state = 869 + self.state = 865 self.match(HogQLParser.LPAREN) - self.state = 871 + self.state = 867 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,108,self._ctx) if la_ == 1: - self.state = 870 + self.state = 866 self.match(HogQLParser.DISTINCT) - self.state = 874 + self.state = 870 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 873 - self.columnArgList() + self.state = 869 + localctx.columnArgList = self.columnExprList() - self.state = 876 + self.state = 872 self.match(HogQLParser.RPAREN) - self.state = 879 + self.state = 875 self.match(HogQLParser.OVER) - self.state = 880 + self.state = 876 self.identifier() pass @@ -6955,45 +6998,45 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprFunctionContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 882 + self.state = 878 self.identifier() - self.state = 888 + self.state = 884 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,112,self._ctx) if la_ == 1: - self.state = 883 + self.state = 879 self.match(HogQLParser.LPAREN) - self.state = 885 + self.state = 881 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 884 - self.columnExprList() + self.state = 880 + localctx.columnExprs = self.columnExprList() - self.state = 887 + self.state = 883 self.match(HogQLParser.RPAREN) - self.state = 890 + self.state = 886 self.match(HogQLParser.LPAREN) - self.state = 892 + self.state = 888 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,113,self._ctx) if la_ == 1: - self.state = 891 + self.state = 887 self.match(HogQLParser.DISTINCT) - self.state = 895 + self.state = 891 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 894 - self.columnArgList() + self.state = 890 + localctx.columnArgList = self.columnExprList() - self.state = 897 + self.state = 893 self.match(HogQLParser.RPAREN) pass @@ -7001,7 +7044,7 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprTagElementContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 899 + self.state = 895 self.hogqlxTagElement() pass @@ -7009,7 +7052,7 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprTemplateStringContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 900 + self.state = 896 self.templateString() pass @@ -7017,7 +7060,7 @@ def columnExpr(self, _p:int=0): localctx = 
HogQLParser.ColumnExprLiteralContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 901 + self.state = 897 self.literal() pass @@ -7025,37 +7068,37 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprNegateContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 902 + self.state = 898 self.match(HogQLParser.DASH) - self.state = 903 - self.columnExpr(19) + self.state = 899 + self.columnExpr(20) pass elif la_ == 15: localctx = HogQLParser.ColumnExprNotContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 904 + self.state = 900 self.match(HogQLParser.NOT) - self.state = 905 - self.columnExpr(13) + self.state = 901 + self.columnExpr(14) pass elif la_ == 16: localctx = HogQLParser.ColumnExprAsteriskContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 909 + self.state = 905 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -725088338784043010) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 3229277487103) != 0): - self.state = 906 + self.state = 902 self.tableIdentifier() - self.state = 907 + self.state = 903 self.match(HogQLParser.DOT) - self.state = 911 + self.state = 907 self.match(HogQLParser.ASTERISK) pass @@ -7063,11 +7106,11 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprSubqueryContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 912 + self.state = 908 self.match(HogQLParser.LPAREN) - self.state = 913 + self.state = 909 self.selectUnionStmt() - self.state = 914 + self.state = 910 self.match(HogQLParser.RPAREN) pass @@ -7075,11 +7118,11 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprParensContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 916 + self.state = 912 self.match(HogQLParser.LPAREN) - self.state = 917 + self.state = 913 self.columnExpr(0) - self.state = 918 + self.state = 914 self.match(HogQLParser.RPAREN) pass @@ -7087,11 +7130,11 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprTupleContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 920 + self.state = 916 self.match(HogQLParser.LPAREN) - self.state = 921 + self.state = 917 self.columnExprList() - self.state = 922 + self.state = 918 self.match(HogQLParser.RPAREN) pass @@ -7099,17 +7142,17 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprArrayContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 924 + self.state = 920 self.match(HogQLParser.LBRACKET) - self.state = 926 + self.state = 922 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 925 + self.state = 921 self.columnExprList() - self.state = 928 + self.state = 924 self.match(HogQLParser.RBRACKET) pass @@ -7117,178 +7160,186 @@ def columnExpr(self, _p:int=0): localctx = HogQLParser.ColumnExprDictContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 929 + self.state = 925 self.match(HogQLParser.LBRACE) - self.state = 931 + self.state = 927 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) 
& ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 930 + self.state = 926 self.kvPairList() - self.state = 933 + self.state = 929 self.match(HogQLParser.RBRACE) pass elif la_ == 22: + localctx = HogQLParser.ColumnExprLambdaContext(self, localctx) + self._ctx = localctx + _prevctx = localctx + self.state = 930 + self.columnLambdaExpr() + pass + + elif la_ == 23: localctx = HogQLParser.ColumnExprIdentifierContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 934 + self.state = 931 self.columnIdentifier() pass self._ctx.stop = self._input.LT(-1) - self.state = 1041 + self.state = 1044 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,129,self._ctx) + _alt = self._interp.adaptivePredict(self._input,130,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 1039 + self.state = 1042 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,128,self._ctx) + la_ = self._interp.adaptivePredict(self._input,129,self._ctx) if la_ == 1: localctx = HogQLParser.ColumnExprPrecedence1Context(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 937 - if not self.precpred(self._ctx, 18): + self.state = 934 + if not self.precpred(self._ctx, 19): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 18)") - self.state = 941 + raise FailedPredicateException(self, "self.precpred(self._ctx, 19)") + self.state = 938 self._errHandler.sync(self) token = self._input.LA(1) if token in [112]: - self.state = 938 + self.state = 935 localctx.operator = self.match(HogQLParser.ASTERISK) pass elif token in [151]: - self.state = 939 + self.state = 936 localctx.operator = self.match(HogQLParser.SLASH) pass elif token in [138]: - self.state = 940 + self.state = 937 localctx.operator = self.match(HogQLParser.PERCENT) pass else: raise NoViableAltException(self) - self.state = 943 - localctx.right = self.columnExpr(19) + self.state = 940 + localctx.right = self.columnExpr(20) pass elif la_ == 2: localctx = HogQLParser.ColumnExprPrecedence2Context(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 944 - if not self.precpred(self._ctx, 17): + self.state = 941 + if not self.precpred(self._ctx, 18): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 17)") - self.state = 948 + raise FailedPredicateException(self, "self.precpred(self._ctx, 18)") + self.state = 945 self._errHandler.sync(self) token = self._input.LA(1) if token in [139]: - self.state = 945 + self.state = 942 localctx.operator = self.match(HogQLParser.PLUS) pass elif token in [118]: - self.state = 946 + self.state = 943 localctx.operator = self.match(HogQLParser.DASH) pass elif token in [117]: - self.state = 947 + self.state = 944 localctx.operator = self.match(HogQLParser.CONCAT) pass else: raise NoViableAltException(self) - self.state = 950 - localctx.right = self.columnExpr(18) + self.state = 947 + localctx.right = self.columnExpr(19) pass elif la_ == 3: localctx = HogQLParser.ColumnExprPrecedence3Context(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) 
localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 951 - if not self.precpred(self._ctx, 16): + self.state = 948 + if not self.precpred(self._ctx, 17): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 16)") - self.state = 976 + raise FailedPredicateException(self, "self.precpred(self._ctx, 17)") + self.state = 973 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,124,self._ctx) if la_ == 1: - self.state = 952 + self.state = 949 localctx.operator = self.match(HogQLParser.EQ_DOUBLE) pass elif la_ == 2: - self.state = 953 + self.state = 950 localctx.operator = self.match(HogQLParser.EQ_SINGLE) pass elif la_ == 3: - self.state = 954 + self.state = 951 localctx.operator = self.match(HogQLParser.NOT_EQ) pass elif la_ == 4: - self.state = 955 + self.state = 952 localctx.operator = self.match(HogQLParser.LT_EQ) pass elif la_ == 5: - self.state = 956 + self.state = 953 localctx.operator = self.match(HogQLParser.LT) pass elif la_ == 6: - self.state = 957 + self.state = 954 localctx.operator = self.match(HogQLParser.GT_EQ) pass elif la_ == 7: - self.state = 958 + self.state = 955 localctx.operator = self.match(HogQLParser.GT) pass elif la_ == 8: - self.state = 960 + self.state = 957 self._errHandler.sync(self) _la = self._input.LA(1) if _la==58: - self.state = 959 + self.state = 956 localctx.operator = self.match(HogQLParser.NOT) - self.state = 962 + self.state = 959 self.match(HogQLParser.IN) - self.state = 964 + self.state = 961 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,122,self._ctx) if la_ == 1: - self.state = 963 + self.state = 960 self.match(HogQLParser.COHORT) pass elif la_ == 9: - self.state = 967 + self.state = 964 self._errHandler.sync(self) _la = self._input.LA(1) if _la==58: - self.state = 966 + self.state = 963 localctx.operator = self.match(HogQLParser.NOT) - self.state = 969 + self.state = 966 _la = self._input.LA(1) if not(_la==41 or _la==53): self._errHandler.recoverInline(self) @@ -7298,247 +7349,268 @@ def columnExpr(self, _p:int=0): pass elif la_ == 10: - self.state = 970 + self.state = 967 localctx.operator = self.match(HogQLParser.REGEX_SINGLE) pass elif la_ == 11: - self.state = 971 + self.state = 968 localctx.operator = self.match(HogQLParser.REGEX_DOUBLE) pass elif la_ == 12: - self.state = 972 + self.state = 969 localctx.operator = self.match(HogQLParser.NOT_REGEX) pass elif la_ == 13: - self.state = 973 + self.state = 970 localctx.operator = self.match(HogQLParser.IREGEX_SINGLE) pass elif la_ == 14: - self.state = 974 + self.state = 971 localctx.operator = self.match(HogQLParser.IREGEX_DOUBLE) pass elif la_ == 15: - self.state = 975 + self.state = 972 localctx.operator = self.match(HogQLParser.NOT_IREGEX) pass - self.state = 978 - localctx.right = self.columnExpr(17) + self.state = 975 + localctx.right = self.columnExpr(18) pass elif la_ == 4: localctx = HogQLParser.ColumnExprNullishContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 979 - if not self.precpred(self._ctx, 14): + self.state = 976 + if not self.precpred(self._ctx, 15): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 14)") - self.state = 980 + raise FailedPredicateException(self, "self.precpred(self._ctx, 15)") + self.state = 977 
self.match(HogQLParser.NULLISH) - self.state = 981 - self.columnExpr(15) + self.state = 978 + self.columnExpr(16) pass elif la_ == 5: localctx = HogQLParser.ColumnExprAndContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 982 - if not self.precpred(self._ctx, 12): + self.state = 979 + if not self.precpred(self._ctx, 13): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 12)") - self.state = 983 + raise FailedPredicateException(self, "self.precpred(self._ctx, 13)") + self.state = 980 self.match(HogQLParser.AND) - self.state = 984 - self.columnExpr(13) + self.state = 981 + self.columnExpr(14) pass elif la_ == 6: localctx = HogQLParser.ColumnExprOrContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 985 - if not self.precpred(self._ctx, 11): + self.state = 982 + if not self.precpred(self._ctx, 12): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 11)") - self.state = 986 + raise FailedPredicateException(self, "self.precpred(self._ctx, 12)") + self.state = 983 self.match(HogQLParser.OR) - self.state = 987 - self.columnExpr(12) + self.state = 984 + self.columnExpr(13) pass elif la_ == 7: localctx = HogQLParser.ColumnExprBetweenContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 988 - if not self.precpred(self._ctx, 10): + self.state = 985 + if not self.precpred(self._ctx, 11): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 990 + raise FailedPredicateException(self, "self.precpred(self._ctx, 11)") + self.state = 987 self._errHandler.sync(self) _la = self._input.LA(1) if _la==58: - self.state = 989 + self.state = 986 self.match(HogQLParser.NOT) - self.state = 992 + self.state = 989 self.match(HogQLParser.BETWEEN) - self.state = 993 + self.state = 990 self.columnExpr(0) - self.state = 994 + self.state = 991 self.match(HogQLParser.AND) - self.state = 995 - self.columnExpr(11) + self.state = 992 + self.columnExpr(12) pass elif la_ == 8: localctx = HogQLParser.ColumnExprTernaryOpContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 997 - if not self.precpred(self._ctx, 9): + self.state = 994 + if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 998 + raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") + self.state = 995 self.match(HogQLParser.QUERY) - self.state = 999 + self.state = 996 self.columnExpr(0) - self.state = 1000 + self.state = 997 self.match(HogQLParser.COLON) - self.state = 1001 - self.columnExpr(9) + self.state = 998 + self.columnExpr(10) pass elif la_ == 9: - localctx = HogQLParser.ColumnExprArrayAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) + localctx = HogQLParser.ColumnExprCallContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, 
self.RULE_columnExpr) - self.state = 1003 - if not self.precpred(self._ctx, 25): + self.state = 1000 + if not self.precpred(self._ctx, 30): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 25)") - self.state = 1004 - self.match(HogQLParser.LBRACKET) + raise FailedPredicateException(self, "self.precpred(self._ctx, 30)") + self.state = 1001 + self.match(HogQLParser.LPAREN) + self.state = 1003 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): + self.state = 1002 + self.columnExprList() + + self.state = 1005 - self.columnExpr(0) - self.state = 1006 - self.match(HogQLParser.RBRACKET) + self.match(HogQLParser.RPAREN) pass elif la_ == 10: - localctx = HogQLParser.ColumnExprTupleAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) + localctx = HogQLParser.ColumnExprArrayAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 1008 - if not self.precpred(self._ctx, 24): + self.state = 1006 + if not self.precpred(self._ctx, 26): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 24)") + raise FailedPredicateException(self, "self.precpred(self._ctx, 26)") + self.state = 1007 + self.match(HogQLParser.LBRACKET) + self.state = 1008 + self.columnExpr(0) self.state = 1009 - self.match(HogQLParser.DOT) - self.state = 1010 - self.match(HogQLParser.DECIMAL_LITERAL) + self.match(HogQLParser.RBRACKET) pass elif la_ == 11: - localctx = HogQLParser.ColumnExprPropertyAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) + localctx = HogQLParser.ColumnExprTupleAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) self.state = 1011 - if not self.precpred(self._ctx, 23): + if not self.precpred(self._ctx, 25): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 23)") + raise FailedPredicateException(self, "self.precpred(self._ctx, 25)") self.state = 1012 self.match(HogQLParser.DOT) self.state = 1013 - self.identifier() + self.match(HogQLParser.DECIMAL_LITERAL) pass elif la_ == 12: - localctx = HogQLParser.ColumnExprNullArrayAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) + localctx = HogQLParser.ColumnExprPropertyAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) self.state = 1014 - if not self.precpred(self._ctx, 22): + if not self.precpred(self._ctx, 24): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 22)") + raise FailedPredicateException(self, "self.precpred(self._ctx, 24)") self.state = 1015 - self.match(HogQLParser.NULL_PROPERTY) + self.match(HogQLParser.DOT) self.state = 1016 - self.match(HogQLParser.LBRACKET) - self.state = 1017 - self.columnExpr(0) - self.state = 1018 - self.match(HogQLParser.RBRACKET) + self.identifier() pass elif la_ == 13: - localctx = 
HogQLParser.ColumnExprNullTupleAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) + localctx = HogQLParser.ColumnExprNullArrayAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 1020 - if not self.precpred(self._ctx, 21): + self.state = 1017 + if not self.precpred(self._ctx, 23): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 21)") - self.state = 1021 + raise FailedPredicateException(self, "self.precpred(self._ctx, 23)") + self.state = 1018 self.match(HogQLParser.NULL_PROPERTY) - self.state = 1022 - self.match(HogQLParser.DECIMAL_LITERAL) + self.state = 1019 + self.match(HogQLParser.LBRACKET) + self.state = 1020 + self.columnExpr(0) + self.state = 1021 + self.match(HogQLParser.RBRACKET) pass elif la_ == 14: - localctx = HogQLParser.ColumnExprNullPropertyAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) + localctx = HogQLParser.ColumnExprNullTupleAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) self.state = 1023 - if not self.precpred(self._ctx, 20): + if not self.precpred(self._ctx, 22): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 20)") + raise FailedPredicateException(self, "self.precpred(self._ctx, 22)") self.state = 1024 self.match(HogQLParser.NULL_PROPERTY) self.state = 1025 - self.identifier() + self.match(HogQLParser.DECIMAL_LITERAL) pass elif la_ == 15: - localctx = HogQLParser.ColumnExprIsNullContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) + localctx = HogQLParser.ColumnExprNullPropertyAccessContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) self.state = 1026 - if not self.precpred(self._ctx, 15): + if not self.precpred(self._ctx, 21): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 15)") + raise FailedPredicateException(self, "self.precpred(self._ctx, 21)") self.state = 1027 - self.match(HogQLParser.IS) + self.match(HogQLParser.NULL_PROPERTY) + self.state = 1028 + self.identifier() + pass + + elif la_ == 16: + localctx = HogQLParser.ColumnExprIsNullContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) + self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) self.state = 1029 + if not self.precpred(self._ctx, 16): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 16)") + self.state = 1030 + self.match(HogQLParser.IS) + self.state = 1032 self._errHandler.sync(self) _la = self._input.LA(1) if _la==58: - self.state = 1028 + self.state = 1031 self.match(HogQLParser.NOT) - self.state = 1031 + self.state = 1034 self.match(HogQLParser.NULL_SQL) pass - elif la_ == 16: + elif la_ == 17: localctx = HogQLParser.ColumnExprAliasContext(self, HogQLParser.ColumnExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_columnExpr) - self.state = 1032 - if not self.precpred(self._ctx, 8): + self.state = 1035 + if not self.precpred(self._ctx, 9): from antlr4.error.Errors import 
FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 1037 + raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") + self.state = 1040 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,127,self._ctx) + la_ = self._interp.adaptivePredict(self._input,128,self._ctx) if la_ == 1: - self.state = 1033 + self.state = 1036 self.match(HogQLParser.AS) - self.state = 1034 + self.state = 1037 self.identifier() pass elif la_ == 2: - self.state = 1035 + self.state = 1038 self.match(HogQLParser.AS) - self.state = 1036 + self.state = 1039 self.match(HogQLParser.STRING_LITERAL) pass @@ -7546,144 +7618,16 @@ def columnExpr(self, _p:int=0): pass - self.state = 1043 - self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,129,self._ctx) - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.unrollRecursionContexts(_parentctx) - return localctx - - - class ColumnArgListContext(ParserRuleContext): - __slots__ = 'parser' - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def columnArgExpr(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(HogQLParser.ColumnArgExprContext) - else: - return self.getTypedRuleContext(HogQLParser.ColumnArgExprContext,i) - - - def COMMA(self, i:int=None): - if i is None: - return self.getTokens(HogQLParser.COMMA) - else: - return self.getToken(HogQLParser.COMMA, i) - - def getRuleIndex(self): - return HogQLParser.RULE_columnArgList - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitColumnArgList" ): - return visitor.visitColumnArgList(self) - else: - return visitor.visitChildren(self) - - - - - def columnArgList(self): - - localctx = HogQLParser.ColumnArgListContext(self, self._ctx, self.state) - self.enterRule(localctx, 118, self.RULE_columnArgList) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 1044 - self.columnArgExpr() - self.state = 1049 - self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,130,self._ctx) - while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: - if _alt==1: - self.state = 1045 - self.match(HogQLParser.COMMA) - self.state = 1046 - self.columnArgExpr() - self.state = 1051 + self.state = 1046 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,130,self._ctx) - self.state = 1053 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==116: - self.state = 1052 - self.match(HogQLParser.COMMA) - - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class ColumnArgExprContext(ParserRuleContext): - __slots__ = 'parser' - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def columnLambdaExpr(self): - return self.getTypedRuleContext(HogQLParser.ColumnLambdaExprContext,0) - - - def columnExpr(self): - return self.getTypedRuleContext(HogQLParser.ColumnExprContext,0) - - - def getRuleIndex(self): - return HogQLParser.RULE_columnArgExpr - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitColumnArgExpr" ): - return visitor.visitColumnArgExpr(self) - else: - 
return visitor.visitChildren(self) - - - - - def columnArgExpr(self): - - localctx = HogQLParser.ColumnArgExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 120, self.RULE_columnArgExpr) - try: - self.state = 1057 - self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,132,self._ctx) - if la_ == 1: - self.enterOuterAlt(localctx, 1) - self.state = 1055 - self.columnLambdaExpr() - pass - - elif la_ == 2: - self.enterOuterAlt(localctx, 2) - self.state = 1056 - self.columnExpr(0) - pass - - except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) self._errHandler.recover(self, re) finally: - self.exitRule() + self.unrollRecursionContexts(_parentctx) return localctx @@ -7697,10 +7641,6 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): def ARROW(self): return self.getToken(HogQLParser.ARROW, 0) - def columnExpr(self): - return self.getTypedRuleContext(HogQLParser.ColumnExprContext,0) - - def LPAREN(self): return self.getToken(HogQLParser.LPAREN, 0) @@ -7714,6 +7654,14 @@ def identifier(self, i:int=None): def RPAREN(self): return self.getToken(HogQLParser.RPAREN, 0) + def columnExpr(self): + return self.getTypedRuleContext(HogQLParser.ColumnExprContext,0) + + + def block(self): + return self.getTypedRuleContext(HogQLParser.BlockContext,0) + + def COMMA(self, i:int=None): if i is None: return self.getTokens(HogQLParser.COMMA) @@ -7735,74 +7683,93 @@ def accept(self, visitor:ParseTreeVisitor): def columnLambdaExpr(self): localctx = HogQLParser.ColumnLambdaExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 122, self.RULE_columnLambdaExpr) + self.enterRule(localctx, 118, self.RULE_columnLambdaExpr) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1084 + self.state = 1074 self._errHandler.sync(self) - token = self._input.LA(1) - if token in [130]: - self.state = 1059 + la_ = self._interp.adaptivePredict(self._input,135,self._ctx) + if la_ == 1: + self.state = 1047 self.match(HogQLParser.LPAREN) - self.state = 1060 + self.state = 1048 self.identifier() - self.state = 1065 + self.state = 1053 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,133,self._ctx) + _alt = self._interp.adaptivePredict(self._input,131,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 1061 + self.state = 1049 self.match(HogQLParser.COMMA) - self.state = 1062 + self.state = 1050 self.identifier() - self.state = 1067 + self.state = 1055 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,133,self._ctx) + _alt = self._interp.adaptivePredict(self._input,131,self._ctx) - self.state = 1069 + self.state = 1057 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 1068 + self.state = 1056 self.match(HogQLParser.COMMA) - self.state = 1071 + self.state = 1059 self.match(HogQLParser.RPAREN) pass - elif token in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 30, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 49, 50, 51, 53, 54, 55, 56, 58, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 101, 102, 103, 105]: - self.state = 1073 + + elif la_ == 2: + self.state = 1061 self.identifier() - self.state = 1078 + self.state = 1066 self._errHandler.sync(self) - _alt = 
self._interp.adaptivePredict(self._input,135,self._ctx) + _alt = self._interp.adaptivePredict(self._input,133,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 1074 + self.state = 1062 self.match(HogQLParser.COMMA) - self.state = 1075 + self.state = 1063 self.identifier() - self.state = 1080 + self.state = 1068 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,135,self._ctx) + _alt = self._interp.adaptivePredict(self._input,133,self._ctx) - self.state = 1082 + self.state = 1070 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 1081 + self.state = 1069 self.match(HogQLParser.COMMA) pass - else: - raise NoViableAltException(self) - self.state = 1086 + elif la_ == 3: + self.state = 1072 + self.match(HogQLParser.LPAREN) + self.state = 1073 + self.match(HogQLParser.RPAREN) + pass + + + self.state = 1076 self.match(HogQLParser.ARROW) - self.state = 1087 - self.columnExpr(0) + self.state = 1079 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,136,self._ctx) + if la_ == 1: + self.state = 1077 + self.columnExpr(0) + pass + + elif la_ == 2: + self.state = 1078 + self.block() + pass + + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -7910,77 +7877,77 @@ def accept(self, visitor:ParseTreeVisitor): def hogqlxTagElement(self): localctx = HogQLParser.HogqlxTagElementContext(self, self._ctx, self.state) - self.enterRule(localctx, 124, self.RULE_hogqlxTagElement) + self.enterRule(localctx, 120, self.RULE_hogqlxTagElement) self._la = 0 # Token type try: - self.state = 1121 + self.state = 1113 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,141,self._ctx) + la_ = self._interp.adaptivePredict(self._input,140,self._ctx) if la_ == 1: localctx = HogQLParser.HogqlxTagElementClosedContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1089 + self.state = 1081 self.match(HogQLParser.LT) - self.state = 1090 + self.state = 1082 self.identifier() - self.state = 1094 + self.state = 1086 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & -725088338784043010) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 3229277487103) != 0): - self.state = 1091 + self.state = 1083 self.hogqlxTagAttribute() - self.state = 1096 + self.state = 1088 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1097 + self.state = 1089 self.match(HogQLParser.SLASH) - self.state = 1098 + self.state = 1090 self.match(HogQLParser.GT) pass elif la_ == 2: localctx = HogQLParser.HogqlxTagElementNestedContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1100 + self.state = 1092 self.match(HogQLParser.LT) - self.state = 1101 + self.state = 1093 self.identifier() - self.state = 1105 + self.state = 1097 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & -725088338784043010) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 3229277487103) != 0): - self.state = 1102 + self.state = 1094 self.hogqlxTagAttribute() - self.state = 1107 + self.state = 1099 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1108 + self.state = 1100 self.match(HogQLParser.GT) - self.state = 1114 + self.state = 1106 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,140,self._ctx) + la_ = self._interp.adaptivePredict(self._input,139,self._ctx) if la_ == 1: - self.state = 
1109 + self.state = 1101 self.hogqlxTagElement() elif la_ == 2: - self.state = 1110 + self.state = 1102 self.match(HogQLParser.LBRACE) - self.state = 1111 + self.state = 1103 self.columnExpr(0) - self.state = 1112 + self.state = 1104 self.match(HogQLParser.RBRACE) - self.state = 1116 + self.state = 1108 self.match(HogQLParser.LT) - self.state = 1117 + self.state = 1109 self.match(HogQLParser.SLASH) - self.state = 1118 + self.state = 1110 self.identifier() - self.state = 1119 + self.state = 1111 self.match(HogQLParser.GT) pass @@ -8037,38 +8004,38 @@ def accept(self, visitor:ParseTreeVisitor): def hogqlxTagAttribute(self): localctx = HogQLParser.HogqlxTagAttributeContext(self, self._ctx, self.state) - self.enterRule(localctx, 126, self.RULE_hogqlxTagAttribute) + self.enterRule(localctx, 122, self.RULE_hogqlxTagAttribute) try: - self.state = 1134 + self.state = 1126 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,142,self._ctx) + la_ = self._interp.adaptivePredict(self._input,141,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 1123 + self.state = 1115 self.identifier() - self.state = 1124 + self.state = 1116 self.match(HogQLParser.EQ_SINGLE) - self.state = 1125 + self.state = 1117 self.string() pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 1127 + self.state = 1119 self.identifier() - self.state = 1128 + self.state = 1120 self.match(HogQLParser.EQ_SINGLE) - self.state = 1129 + self.state = 1121 self.match(HogQLParser.LBRACE) - self.state = 1130 + self.state = 1122 self.columnExpr(0) - self.state = 1131 + self.state = 1123 self.match(HogQLParser.RBRACE) pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 1133 + self.state = 1125 self.identifier() pass @@ -8117,30 +8084,30 @@ def accept(self, visitor:ParseTreeVisitor): def withExprList(self): localctx = HogQLParser.WithExprListContext(self, self._ctx, self.state) - self.enterRule(localctx, 128, self.RULE_withExprList) + self.enterRule(localctx, 124, self.RULE_withExprList) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1136 + self.state = 1128 self.withExpr() - self.state = 1141 + self.state = 1133 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,143,self._ctx) + _alt = self._interp.adaptivePredict(self._input,142,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 1137 + self.state = 1129 self.match(HogQLParser.COMMA) - self.state = 1138 + self.state = 1130 self.withExpr() - self.state = 1143 + self.state = 1135 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,143,self._ctx) + _alt = self._interp.adaptivePredict(self._input,142,self._ctx) - self.state = 1145 + self.state = 1137 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 1144 + self.state = 1136 self.match(HogQLParser.COMMA) @@ -8222,34 +8189,34 @@ def accept(self, visitor:ParseTreeVisitor): def withExpr(self): localctx = HogQLParser.WithExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 130, self.RULE_withExpr) + self.enterRule(localctx, 126, self.RULE_withExpr) try: - self.state = 1157 + self.state = 1149 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,145,self._ctx) + la_ = self._interp.adaptivePredict(self._input,144,self._ctx) if la_ == 1: localctx = HogQLParser.WithExprSubqueryContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1147 + self.state = 1139 self.identifier() - 
self.state = 1148 + self.state = 1140 self.match(HogQLParser.AS) - self.state = 1149 + self.state = 1141 self.match(HogQLParser.LPAREN) - self.state = 1150 + self.state = 1142 self.selectUnionStmt() - self.state = 1151 + self.state = 1143 self.match(HogQLParser.RPAREN) pass elif la_ == 2: localctx = HogQLParser.WithExprColumnContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1153 + self.state = 1145 self.columnExpr(0) - self.state = 1154 + self.state = 1146 self.match(HogQLParser.AS) - self.state = 1155 + self.state = 1147 self.identifier() pass @@ -8300,29 +8267,29 @@ def accept(self, visitor:ParseTreeVisitor): def columnIdentifier(self): localctx = HogQLParser.ColumnIdentifierContext(self, self._ctx, self.state) - self.enterRule(localctx, 132, self.RULE_columnIdentifier) + self.enterRule(localctx, 128, self.RULE_columnIdentifier) try: - self.state = 1166 + self.state = 1158 self._errHandler.sync(self) token = self._input.LA(1) if token in [128]: self.enterOuterAlt(localctx, 1) - self.state = 1159 + self.state = 1151 self.placeholder() pass elif token in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 30, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 44, 45, 46, 47, 48, 49, 50, 51, 53, 54, 55, 56, 58, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 101, 102, 103, 105]: self.enterOuterAlt(localctx, 2) - self.state = 1163 + self.state = 1155 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,146,self._ctx) + la_ = self._interp.adaptivePredict(self._input,145,self._ctx) if la_ == 1: - self.state = 1160 + self.state = 1152 self.tableIdentifier() - self.state = 1161 + self.state = 1153 self.match(HogQLParser.DOT) - self.state = 1165 + self.state = 1157 self.nestedIdentifier() pass else: @@ -8372,23 +8339,23 @@ def accept(self, visitor:ParseTreeVisitor): def nestedIdentifier(self): localctx = HogQLParser.NestedIdentifierContext(self, self._ctx, self.state) - self.enterRule(localctx, 134, self.RULE_nestedIdentifier) + self.enterRule(localctx, 130, self.RULE_nestedIdentifier) try: self.enterOuterAlt(localctx, 1) - self.state = 1168 + self.state = 1160 self.identifier() - self.state = 1173 + self.state = 1165 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,148,self._ctx) + _alt = self._interp.adaptivePredict(self._input,147,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 1169 + self.state = 1161 self.match(HogQLParser.DOT) - self.state = 1170 + self.state = 1162 self.identifier() - self.state = 1175 + self.state = 1167 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,148,self._ctx) + _alt = self._interp.adaptivePredict(self._input,147,self._ctx) except RecognitionException as re: localctx.exception = re @@ -8535,19 +8502,19 @@ def tableExpr(self, _p:int=0): _parentState = self.state localctx = HogQLParser.TableExprContext(self, self._ctx, _parentState) _prevctx = localctx - _startState = 136 - self.enterRecursionRule(localctx, 136, self.RULE_tableExpr, _p) + _startState = 132 + self.enterRecursionRule(localctx, 132, self.RULE_tableExpr, _p) try: self.enterOuterAlt(localctx, 1) - self.state = 1185 + self.state = 1177 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,149,self._ctx) + la_ = self._interp.adaptivePredict(self._input,148,self._ctx) if la_ == 1: localctx = 
HogQLParser.TableExprIdentifierContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 1177 + self.state = 1169 self.tableIdentifier() pass @@ -8555,7 +8522,7 @@ def tableExpr(self, _p:int=0): localctx = HogQLParser.TableExprFunctionContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 1178 + self.state = 1170 self.tableFunctionExpr() pass @@ -8563,11 +8530,11 @@ def tableExpr(self, _p:int=0): localctx = HogQLParser.TableExprSubqueryContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 1179 + self.state = 1171 self.match(HogQLParser.LPAREN) - self.state = 1180 + self.state = 1172 self.selectUnionStmt() - self.state = 1181 + self.state = 1173 self.match(HogQLParser.RPAREN) pass @@ -8575,7 +8542,7 @@ def tableExpr(self, _p:int=0): localctx = HogQLParser.TableExprTagContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 1183 + self.state = 1175 self.hogqlxTagElement() pass @@ -8583,15 +8550,15 @@ def tableExpr(self, _p:int=0): localctx = HogQLParser.TableExprPlaceholderContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 1184 + self.state = 1176 self.placeholder() pass self._ctx.stop = self._input.LT(-1) - self.state = 1195 + self.state = 1187 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,151,self._ctx) + _alt = self._interp.adaptivePredict(self._input,150,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: if self._parseListeners is not None: @@ -8599,29 +8566,29 @@ def tableExpr(self, _p:int=0): _prevctx = localctx localctx = HogQLParser.TableExprAliasContext(self, HogQLParser.TableExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_tableExpr) - self.state = 1187 + self.state = 1179 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 1191 + self.state = 1183 self._errHandler.sync(self) token = self._input.LA(1) if token in [20, 30, 39, 48, 105]: - self.state = 1188 + self.state = 1180 self.alias() pass elif token in [6]: - self.state = 1189 + self.state = 1181 self.match(HogQLParser.AS) - self.state = 1190 + self.state = 1182 self.identifier() pass else: raise NoViableAltException(self) - self.state = 1197 + self.state = 1189 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,151,self._ctx) + _alt = self._interp.adaptivePredict(self._input,150,self._ctx) except RecognitionException as re: localctx.exception = re @@ -8668,23 +8635,23 @@ def accept(self, visitor:ParseTreeVisitor): def tableFunctionExpr(self): localctx = HogQLParser.TableFunctionExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 138, self.RULE_tableFunctionExpr) + self.enterRule(localctx, 134, self.RULE_tableFunctionExpr) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1198 + self.state = 1190 self.identifier() - self.state = 1199 + self.state = 1191 self.match(HogQLParser.LPAREN) - self.state = 1201 + self.state = 1193 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & -4503602311741442) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 90493036243451903) != 0) or ((((_la - 128)) & ~0x3f) == 0 and ((1 << (_la - 128)) & 18455) != 0): - self.state = 1200 + self.state = 1192 self.tableArgList() - self.state = 1203 + self.state = 1195 
self.match(HogQLParser.RPAREN) except RecognitionException as re: localctx.exception = re @@ -8728,20 +8695,20 @@ def accept(self, visitor:ParseTreeVisitor): def tableIdentifier(self): localctx = HogQLParser.TableIdentifierContext(self, self._ctx, self.state) - self.enterRule(localctx, 140, self.RULE_tableIdentifier) + self.enterRule(localctx, 136, self.RULE_tableIdentifier) try: self.enterOuterAlt(localctx, 1) - self.state = 1208 + self.state = 1200 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,153,self._ctx) + la_ = self._interp.adaptivePredict(self._input,152,self._ctx) if la_ == 1: - self.state = 1205 + self.state = 1197 self.databaseIdentifier() - self.state = 1206 + self.state = 1198 self.match(HogQLParser.DOT) - self.state = 1210 + self.state = 1202 self.identifier() except RecognitionException as re: localctx.exception = re @@ -8787,30 +8754,30 @@ def accept(self, visitor:ParseTreeVisitor): def tableArgList(self): localctx = HogQLParser.TableArgListContext(self, self._ctx, self.state) - self.enterRule(localctx, 142, self.RULE_tableArgList) + self.enterRule(localctx, 138, self.RULE_tableArgList) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1212 + self.state = 1204 self.columnExpr(0) - self.state = 1217 + self.state = 1209 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,154,self._ctx) + _alt = self._interp.adaptivePredict(self._input,153,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 1213 + self.state = 1205 self.match(HogQLParser.COMMA) - self.state = 1214 + self.state = 1206 self.columnExpr(0) - self.state = 1219 + self.state = 1211 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,154,self._ctx) + _alt = self._interp.adaptivePredict(self._input,153,self._ctx) - self.state = 1221 + self.state = 1213 self._errHandler.sync(self) _la = self._input.LA(1) if _la==116: - self.state = 1220 + self.state = 1212 self.match(HogQLParser.COMMA) @@ -8849,10 +8816,10 @@ def accept(self, visitor:ParseTreeVisitor): def databaseIdentifier(self): localctx = HogQLParser.DatabaseIdentifierContext(self, self._ctx, self.state) - self.enterRule(localctx, 144, self.RULE_databaseIdentifier) + self.enterRule(localctx, 140, self.RULE_databaseIdentifier) try: self.enterOuterAlt(localctx, 1) - self.state = 1223 + self.state = 1215 self.identifier() except RecognitionException as re: localctx.exception = re @@ -8900,22 +8867,22 @@ def accept(self, visitor:ParseTreeVisitor): def floatingLiteral(self): localctx = HogQLParser.FloatingLiteralContext(self, self._ctx, self.state) - self.enterRule(localctx, 146, self.RULE_floatingLiteral) + self.enterRule(localctx, 142, self.RULE_floatingLiteral) self._la = 0 # Token type try: - self.state = 1233 + self.state = 1225 self._errHandler.sync(self) token = self._input.LA(1) if token in [106]: self.enterOuterAlt(localctx, 1) - self.state = 1225 + self.state = 1217 self.match(HogQLParser.FLOATING_LITERAL) pass elif token in [120]: self.enterOuterAlt(localctx, 2) - self.state = 1226 + self.state = 1218 self.match(HogQLParser.DOT) - self.state = 1227 + self.state = 1219 _la = self._input.LA(1) if not(_la==107 or _la==108): self._errHandler.recoverInline(self) @@ -8925,15 +8892,15 @@ def floatingLiteral(self): pass elif token in [108]: self.enterOuterAlt(localctx, 3) - self.state = 1228 + self.state = 1220 self.match(HogQLParser.DECIMAL_LITERAL) - self.state = 1229 + self.state = 1221 self.match(HogQLParser.DOT) - 
self.state = 1231 + self.state = 1223 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,156,self._ctx) + la_ = self._interp.adaptivePredict(self._input,155,self._ctx) if la_ == 1: - self.state = 1230 + self.state = 1222 _la = self._input.LA(1) if not(_la==107 or _la==108): self._errHandler.recoverInline(self) @@ -9002,15 +8969,15 @@ def accept(self, visitor:ParseTreeVisitor): def numberLiteral(self): localctx = HogQLParser.NumberLiteralContext(self, self._ctx, self.state) - self.enterRule(localctx, 148, self.RULE_numberLiteral) + self.enterRule(localctx, 144, self.RULE_numberLiteral) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1236 + self.state = 1228 self._errHandler.sync(self) _la = self._input.LA(1) if _la==118 or _la==139: - self.state = 1235 + self.state = 1227 _la = self._input.LA(1) if not(_la==118 or _la==139): self._errHandler.recoverInline(self) @@ -9019,36 +8986,36 @@ def numberLiteral(self): self.consume() - self.state = 1244 + self.state = 1236 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,159,self._ctx) + la_ = self._interp.adaptivePredict(self._input,158,self._ctx) if la_ == 1: - self.state = 1238 + self.state = 1230 self.floatingLiteral() pass elif la_ == 2: - self.state = 1239 + self.state = 1231 self.match(HogQLParser.OCTAL_LITERAL) pass elif la_ == 3: - self.state = 1240 + self.state = 1232 self.match(HogQLParser.DECIMAL_LITERAL) pass elif la_ == 4: - self.state = 1241 + self.state = 1233 self.match(HogQLParser.HEXADECIMAL_LITERAL) pass elif la_ == 5: - self.state = 1242 + self.state = 1234 self.match(HogQLParser.INF) pass elif la_ == 6: - self.state = 1243 + self.state = 1235 self.match(HogQLParser.NAN_SQL) pass @@ -9094,24 +9061,24 @@ def accept(self, visitor:ParseTreeVisitor): def literal(self): localctx = HogQLParser.LiteralContext(self, self._ctx, self.state) - self.enterRule(localctx, 150, self.RULE_literal) + self.enterRule(localctx, 146, self.RULE_literal) try: - self.state = 1249 + self.state = 1241 self._errHandler.sync(self) token = self._input.LA(1) if token in [43, 57, 106, 107, 108, 109, 118, 120, 139]: self.enterOuterAlt(localctx, 1) - self.state = 1246 + self.state = 1238 self.numberLiteral() pass elif token in [110]: self.enterOuterAlt(localctx, 2) - self.state = 1247 + self.state = 1239 self.match(HogQLParser.STRING_LITERAL) pass elif token in [59]: self.enterOuterAlt(localctx, 3) - self.state = 1248 + self.state = 1240 self.match(HogQLParser.NULL_SQL) pass else: @@ -9172,11 +9139,11 @@ def accept(self, visitor:ParseTreeVisitor): def interval(self): localctx = HogQLParser.IntervalContext(self, self._ctx, self.state) - self.enterRule(localctx, 152, self.RULE_interval) + self.enterRule(localctx, 148, self.RULE_interval) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1251 + self.state = 1243 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 108086665936896000) != 0) or ((((_la - 70)) & ~0x3f) == 0 and ((1 << (_la - 70)) & 8724152577) != 0)): self._errHandler.recoverInline(self) @@ -9469,11 +9436,11 @@ def accept(self, visitor:ParseTreeVisitor): def keyword(self): localctx = HogQLParser.KeywordContext(self, self._ctx, self.state) - self.enterRule(localctx, 154, self.RULE_keyword) + self.enterRule(localctx, 150, self.RULE_keyword) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1253 + self.state = 1245 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 
-833175004720939010) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & 471908466623) != 0)): self._errHandler.recoverInline(self) @@ -9523,11 +9490,11 @@ def accept(self, visitor:ParseTreeVisitor): def keywordForAlias(self): localctx = HogQLParser.KeywordForAliasContext(self, self._ctx, self.state) - self.enterRule(localctx, 156, self.RULE_keywordForAlias) + self.enterRule(localctx, 152, self.RULE_keywordForAlias) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1255 + self.state = 1247 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 282025807314944) != 0)): self._errHandler.recoverInline(self) @@ -9572,19 +9539,19 @@ def accept(self, visitor:ParseTreeVisitor): def alias(self): localctx = HogQLParser.AliasContext(self, self._ctx, self.state) - self.enterRule(localctx, 158, self.RULE_alias) + self.enterRule(localctx, 154, self.RULE_alias) try: - self.state = 1259 + self.state = 1251 self._errHandler.sync(self) token = self._input.LA(1) if token in [105]: self.enterOuterAlt(localctx, 1) - self.state = 1257 + self.state = 1249 self.match(HogQLParser.IDENTIFIER) pass elif token in [20, 30, 39, 48]: self.enterOuterAlt(localctx, 2) - self.state = 1258 + self.state = 1250 self.keywordForAlias() pass else: @@ -9632,24 +9599,24 @@ def accept(self, visitor:ParseTreeVisitor): def identifier(self): localctx = HogQLParser.IdentifierContext(self, self._ctx, self.state) - self.enterRule(localctx, 160, self.RULE_identifier) + self.enterRule(localctx, 156, self.RULE_identifier) try: - self.state = 1264 + self.state = 1256 self._errHandler.sync(self) token = self._input.LA(1) if token in [105]: self.enterOuterAlt(localctx, 1) - self.state = 1261 + self.state = 1253 self.match(HogQLParser.IDENTIFIER) pass elif token in [21, 38, 55, 56, 70, 78, 97, 103]: self.enterOuterAlt(localctx, 2) - self.state = 1262 + self.state = 1254 self.interval() pass elif token in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 26, 27, 28, 30, 32, 33, 34, 35, 36, 37, 39, 40, 41, 42, 44, 45, 46, 47, 48, 49, 50, 51, 53, 54, 58, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 71, 72, 73, 74, 75, 76, 77, 79, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 98, 99, 101, 102]: self.enterOuterAlt(localctx, 3) - self.state = 1263 + self.state = 1255 self.keyword() pass else: @@ -9697,14 +9664,14 @@ def accept(self, visitor:ParseTreeVisitor): def enumValue(self): localctx = HogQLParser.EnumValueContext(self, self._ctx, self.state) - self.enterRule(localctx, 162, self.RULE_enumValue) + self.enterRule(localctx, 158, self.RULE_enumValue) try: self.enterOuterAlt(localctx, 1) - self.state = 1266 + self.state = 1258 self.string() - self.state = 1267 + self.state = 1259 self.match(HogQLParser.EQ_SINGLE) - self.state = 1268 + self.state = 1260 self.numberLiteral() except RecognitionException as re: localctx.exception = re @@ -9747,14 +9714,14 @@ def accept(self, visitor:ParseTreeVisitor): def placeholder(self): localctx = HogQLParser.PlaceholderContext(self, self._ctx, self.state) - self.enterRule(localctx, 164, self.RULE_placeholder) + self.enterRule(localctx, 160, self.RULE_placeholder) try: self.enterOuterAlt(localctx, 1) - self.state = 1270 + self.state = 1262 self.match(HogQLParser.LBRACE) - self.state = 1271 + self.state = 1263 self.nestedIdentifier() - self.state = 1272 + self.state = 1264 self.match(HogQLParser.RBRACE) except RecognitionException as re: localctx.exception = re @@ -9794,19 +9761,19 @@ def accept(self, 
visitor:ParseTreeVisitor): def string(self): localctx = HogQLParser.StringContext(self, self._ctx, self.state) - self.enterRule(localctx, 166, self.RULE_string) + self.enterRule(localctx, 162, self.RULE_string) try: - self.state = 1276 + self.state = 1268 self._errHandler.sync(self) token = self._input.LA(1) if token in [110]: self.enterOuterAlt(localctx, 1) - self.state = 1274 + self.state = 1266 self.match(HogQLParser.STRING_LITERAL) pass elif token in [142]: self.enterOuterAlt(localctx, 2) - self.state = 1275 + self.state = 1267 self.templateString() pass else: @@ -9856,23 +9823,23 @@ def accept(self, visitor:ParseTreeVisitor): def templateString(self): localctx = HogQLParser.TemplateStringContext(self, self._ctx, self.state) - self.enterRule(localctx, 168, self.RULE_templateString) + self.enterRule(localctx, 164, self.RULE_templateString) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1278 + self.state = 1270 self.match(HogQLParser.QUOTE_SINGLE_TEMPLATE) - self.state = 1282 + self.state = 1274 self._errHandler.sync(self) _la = self._input.LA(1) while _la==156 or _la==157: - self.state = 1279 + self.state = 1271 self.stringContents() - self.state = 1284 + self.state = 1276 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1285 + self.state = 1277 self.match(HogQLParser.QUOTE_SINGLE) except RecognitionException as re: localctx.exception = re @@ -9918,23 +9885,23 @@ def accept(self, visitor:ParseTreeVisitor): def stringContents(self): localctx = HogQLParser.StringContentsContext(self, self._ctx, self.state) - self.enterRule(localctx, 170, self.RULE_stringContents) + self.enterRule(localctx, 166, self.RULE_stringContents) try: - self.state = 1292 + self.state = 1284 self._errHandler.sync(self) token = self._input.LA(1) if token in [157]: self.enterOuterAlt(localctx, 1) - self.state = 1287 + self.state = 1279 self.match(HogQLParser.STRING_ESCAPE_TRIGGER) - self.state = 1288 + self.state = 1280 self.columnExpr(0) - self.state = 1289 + self.state = 1281 self.match(HogQLParser.RBRACE) pass elif token in [156]: self.enterOuterAlt(localctx, 2) - self.state = 1291 + self.state = 1283 self.match(HogQLParser.STRING_TEXT) pass else: @@ -9984,23 +9951,23 @@ def accept(self, visitor:ParseTreeVisitor): def fullTemplateString(self): localctx = HogQLParser.FullTemplateStringContext(self, self._ctx, self.state) - self.enterRule(localctx, 172, self.RULE_fullTemplateString) + self.enterRule(localctx, 168, self.RULE_fullTemplateString) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1294 + self.state = 1286 self.match(HogQLParser.QUOTE_SINGLE_TEMPLATE_FULL) - self.state = 1298 + self.state = 1290 self._errHandler.sync(self) _la = self._input.LA(1) while _la==158 or _la==159: - self.state = 1295 + self.state = 1287 self.stringContentsFull() - self.state = 1300 + self.state = 1292 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1301 + self.state = 1293 self.match(HogQLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -10046,23 +10013,23 @@ def accept(self, visitor:ParseTreeVisitor): def stringContentsFull(self): localctx = HogQLParser.StringContentsFullContext(self, self._ctx, self.state) - self.enterRule(localctx, 174, self.RULE_stringContentsFull) + self.enterRule(localctx, 170, self.RULE_stringContentsFull) try: - self.state = 1308 + self.state = 1300 self._errHandler.sync(self) token = self._input.LA(1) if token in [159]: self.enterOuterAlt(localctx, 1) - self.state = 1303 + self.state = 
1295 self.match(HogQLParser.FULL_STRING_ESCAPE_TRIGGER) - self.state = 1304 + self.state = 1296 self.columnExpr(0) - self.state = 1305 + self.state = 1297 self.match(HogQLParser.RBRACE) pass elif token in [158]: self.enterOuterAlt(localctx, 2) - self.state = 1307 + self.state = 1299 self.match(HogQLParser.FULL_STRING_TEXT) pass else: @@ -10083,7 +10050,7 @@ def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): self._predicates = dict() self._predicates[39] = self.joinExpr_sempred self._predicates[58] = self.columnExpr_sempred - self._predicates[68] = self.tableExpr_sempred + self._predicates[66] = self.tableExpr_sempred pred = self._predicates.get(ruleIndex, None) if pred is None: raise Exception("No predicate with index:" + str(ruleIndex)) @@ -10101,71 +10068,75 @@ def joinExpr_sempred(self, localctx:JoinExprContext, predIndex:int): def columnExpr_sempred(self, localctx:ColumnExprContext, predIndex:int): if predIndex == 2: - return self.precpred(self._ctx, 18) + return self.precpred(self._ctx, 19) if predIndex == 3: - return self.precpred(self._ctx, 17) + return self.precpred(self._ctx, 18) if predIndex == 4: - return self.precpred(self._ctx, 16) + return self.precpred(self._ctx, 17) if predIndex == 5: - return self.precpred(self._ctx, 14) + return self.precpred(self._ctx, 15) if predIndex == 6: - return self.precpred(self._ctx, 12) + return self.precpred(self._ctx, 13) if predIndex == 7: - return self.precpred(self._ctx, 11) + return self.precpred(self._ctx, 12) if predIndex == 8: - return self.precpred(self._ctx, 10) + return self.precpred(self._ctx, 11) if predIndex == 9: - return self.precpred(self._ctx, 9) + return self.precpred(self._ctx, 10) if predIndex == 10: - return self.precpred(self._ctx, 25) + return self.precpred(self._ctx, 30) if predIndex == 11: - return self.precpred(self._ctx, 24) + return self.precpred(self._ctx, 26) if predIndex == 12: - return self.precpred(self._ctx, 23) + return self.precpred(self._ctx, 25) if predIndex == 13: - return self.precpred(self._ctx, 22) + return self.precpred(self._ctx, 24) if predIndex == 14: - return self.precpred(self._ctx, 21) + return self.precpred(self._ctx, 23) if predIndex == 15: - return self.precpred(self._ctx, 20) + return self.precpred(self._ctx, 22) if predIndex == 16: - return self.precpred(self._ctx, 15) + return self.precpred(self._ctx, 21) if predIndex == 17: - return self.precpred(self._ctx, 8) + return self.precpred(self._ctx, 16) - def tableExpr_sempred(self, localctx:TableExprContext, predIndex:int): if predIndex == 18: + return self.precpred(self._ctx, 9) + + + def tableExpr_sempred(self, localctx:TableExprContext, predIndex:int): + if predIndex == 19: return self.precpred(self._ctx, 3) diff --git a/posthog/hogql/grammar/HogQLParserVisitor.py b/posthog/hogql/grammar/HogQLParserVisitor.py index bcd327d6abc68..be0ba9f0e29da 100644 --- a/posthog/hogql/grammar/HogQLParserVisitor.py +++ b/posthog/hogql/grammar/HogQLParserVisitor.py @@ -1,4 +1,4 @@ -# Generated from HogQLParser.g4 by ANTLR 4.13.1 +# Generated from HogQLParser.g4 by ANTLR 4.13.2 from antlr4 import * if "." in __name__: from .HogQLParser import HogQLParser @@ -459,6 +459,11 @@ def visitColumnExprTuple(self, ctx:HogQLParser.ColumnExprTupleContext): return self.visitChildren(ctx) + # Visit a parse tree produced by HogQLParser#ColumnExprCall. + def visitColumnExprCall(self, ctx:HogQLParser.ColumnExprCallContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by HogQLParser#ColumnExprArrayAccess. 
def visitColumnExprArrayAccess(self, ctx:HogQLParser.ColumnExprArrayAccessContext): return self.visitChildren(ctx) @@ -524,6 +529,11 @@ def visitColumnExprWinFunction(self, ctx:HogQLParser.ColumnExprWinFunctionContex return self.visitChildren(ctx) + # Visit a parse tree produced by HogQLParser#ColumnExprLambda. + def visitColumnExprLambda(self, ctx:HogQLParser.ColumnExprLambdaContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by HogQLParser#ColumnExprIdentifier. def visitColumnExprIdentifier(self, ctx:HogQLParser.ColumnExprIdentifierContext): return self.visitChildren(ctx) @@ -539,16 +549,6 @@ def visitColumnExprAsterisk(self, ctx:HogQLParser.ColumnExprAsteriskContext): return self.visitChildren(ctx) - # Visit a parse tree produced by HogQLParser#columnArgList. - def visitColumnArgList(self, ctx:HogQLParser.ColumnArgListContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by HogQLParser#columnArgExpr. - def visitColumnArgExpr(self, ctx:HogQLParser.ColumnArgExprContext): - return self.visitChildren(ctx) - - # Visit a parse tree produced by HogQLParser#columnLambdaExpr. def visitColumnLambdaExpr(self, ctx:HogQLParser.ColumnLambdaExprContext): return self.visitChildren(ctx) diff --git a/posthog/hogql/grammar/README.md b/posthog/hogql/grammar/README.md index 4022ba71825a7..1b9837a2077ac 100644 --- a/posthog/hogql/grammar/README.md +++ b/posthog/hogql/grammar/README.md @@ -2,16 +2,18 @@ Grammar is located inside `HogQLLexer.g4` and `HogQLParser.g4` files. -To generate source code you need to install locally the `antlr` binary: +To generate source code you need to install locally the `antlr` binary. Run this on macOS: ```bash brew install antlr ``` -or this piece of art if you're using bash on ubuntu for quick access: +In case this installs a newer version than 4.13.2, update [ci-hog.yml](https://github.com/PostHog/posthog/blob/master/.github/workflows/ci-hog.yml) to reflect the changes. 
+ +Run this if you're using bash on ubuntu: ```bash -export ANTLR_VERSION=4.11.1 +export ANTLR_VERSION=4.13.2 sudo apt-get install default-jre mkdir antlr diff --git a/posthog/hogql/parser.py b/posthog/hogql/parser.py index 96afe5ae15bc5..71708d1987032 100644 --- a/posthog/hogql/parser.py +++ b/posthog/hogql/parser.py @@ -907,16 +907,16 @@ def visitColumnExprNot(self, ctx: HogQLParser.ColumnExprNotContext): def visitColumnExprWinFunctionTarget(self, ctx: HogQLParser.ColumnExprWinFunctionTargetContext): return ast.WindowFunction( name=self.visit(ctx.identifier(0)), - exprs=self.visit(ctx.columnExprList()) if ctx.columnExprList() else [], - args=self.visit(ctx.columnArgList()) if ctx.columnArgList() else [], + exprs=self.visit(ctx.columnExprs) if ctx.columnExprs else [], + args=self.visit(ctx.columnArgList) if ctx.columnArgList else [], over_identifier=self.visit(ctx.identifier(1)), ) def visitColumnExprWinFunction(self, ctx: HogQLParser.ColumnExprWinFunctionContext): return ast.WindowFunction( name=self.visit(ctx.identifier()), - exprs=self.visit(ctx.columnExprList()) if ctx.columnExprList() else [], - args=self.visit(ctx.columnArgList()) if ctx.columnArgList() else [], + exprs=self.visit(ctx.columnExprs) if ctx.columnExprs else [], + args=self.visit(ctx.columnArgList) if ctx.columnArgList else [], over_expr=self.visit(ctx.windowExpr()) if ctx.windowExpr() else None, ) @@ -925,10 +925,13 @@ def visitColumnExprIdentifier(self, ctx: HogQLParser.ColumnExprIdentifierContext def visitColumnExprFunction(self, ctx: HogQLParser.ColumnExprFunctionContext): name = self.visit(ctx.identifier()) - column_expr_list = ctx.columnExprList() - parameters = self.visit(column_expr_list) if column_expr_list is not None else None - column_arg_list = ctx.columnArgList() - args = self.visit(column_arg_list) if column_arg_list is not None else [] + + parameters: list[ast.Expr] | None = self.visit(ctx.columnExprs) if ctx.columnExprs is not None else None + # two sets of parameters fn()(), return an empty list for the first even if no parameters + if ctx.LPAREN(1) and parameters is None: + parameters = [] + + args: list[ast.Expr] = self.visit(ctx.columnArgList) if ctx.columnArgList is not None else [] distinct = True if ctx.DISTINCT() else False return ast.Call(name=name, params=parameters, args=args, distinct=distinct) @@ -941,16 +944,10 @@ def visitColumnExprAsterisk(self, ctx: HogQLParser.ColumnExprAsteriskContext): def visitColumnExprTagElement(self, ctx: HogQLParser.ColumnExprTagElementContext): return self.visit(ctx.hogqlxTagElement()) - def visitColumnArgList(self, ctx: HogQLParser.ColumnArgListContext): - return [self.visit(arg) for arg in ctx.columnArgExpr()] - - def visitColumnArgExpr(self, ctx: HogQLParser.ColumnArgExprContext): - return self.visitChildren(ctx) - def visitColumnLambdaExpr(self, ctx: HogQLParser.ColumnLambdaExprContext): return ast.Lambda( args=[self.visit(identifier) for identifier in ctx.identifier()], - expr=self.visit(ctx.columnExpr()), + expr=self.visit(ctx.columnExpr() or ctx.block()), ) def visitWithExprList(self, ctx: HogQLParser.WithExprListContext): @@ -1084,6 +1081,11 @@ def visitColumnExprNullish(self, ctx: HogQLParser.ColumnExprNullishContext): args=[self.visit(ctx.columnExpr(0)), self.visit(ctx.columnExpr(1))], ) + def visitColumnExprCall(self, ctx: HogQLParser.ColumnExprCallContext): + return ast.ExprCall( + expr=self.visit(ctx.columnExpr()), args=self.visit(ctx.columnExprList()) if ctx.columnExprList() else [] + ) + def visitHogqlxTagElementClosed(self, ctx: 
HogQLParser.HogqlxTagElementClosedContext): kind = self.visit(ctx.identifier()) attributes = [self.visit(a) for a in ctx.hogqlxTagAttribute()] if ctx.hogqlxTagAttribute() else [] diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index 6b50ef0972bb7..1f481d071ce1c 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -189,6 +189,43 @@ class JoinExprResponse: where: Optional[ast.Expr] = None +@dataclass +class PrintableMaterializedColumn: + table: Optional[str] + column: str + + def __str__(self) -> str: + if self.table is None: + # XXX: For legacy person properties handling (see comment at instantiation site.) + return self.column + else: + return f"{self.table}.{self.column}" + + +@dataclass +class PrintableMaterializedPropertyGroupItem: + table: str + column: str + property_name: str + + def __str__(self) -> str: + # If the key we're looking for doesn't exist in the map for this property group, an empty string (the default + # value for the `String` type) is returned. Since that is a valid property value, we need to check it here. + return f"{self.has_expr} ? {self.value_expr} : null" + + @property + def __qualified_column(self) -> str: + return f"{self.table}.{self.column}" + + @property + def has_expr(self) -> str: + return f"has({self.__qualified_column}, {self.property_name})" + + @property + def value_expr(self) -> str: + return f"{self.__qualified_column}[{self.property_name}]" + + class _Printer(Visitor): # NOTE: Call "print_ast()", not this class directly. @@ -561,7 +598,137 @@ def visit_lambda(self, node: ast.Lambda): def visit_order_expr(self, node: ast.OrderExpr): return f"{self.visit(node.expr)} {node.order}" + def __get_optimized_property_group_compare_operation(self, node: ast.CompareOperation) -> str | None: + """ + Returns a printed expression corresponding to the provided compare operation, if one of the operands is part of + a property group value and: the comparison can be rewritten so that it can be eligible for use by one or more + the property group's bloom filter data skipping indices, or the expression can be optimized to avoid reading the + property group's map ``values`` subcolumn when doing comparisons to NULL values. + """ + if self.context.modifiers.propertyGroupsMode != PropertyGroupsMode.OPTIMIZED: + return None + + def resolve_field_type(expr: ast.Expr) -> ast.Type | None: + expr_type = expr.type + while isinstance(expr_type, ast.FieldAliasType): + expr_type = expr_type.type + return expr_type + + if node.op in (ast.CompareOperationOp.Eq, ast.CompareOperationOp.NotEq): + # For commutative operations, we can rewrite the expression with parameters in either order without + # affecting the result. + # NOTE: For now, this only works with comparisons to constant values directly since we need to know whether + # or not the non-``PropertyType`` operand is ``NULL`` to be able to rewrite the expression to the correct + # optimized version. This could be extended to support *any* non-``Nullable`` expression as well, so that + # expressions which do not reference a field as part of the expression (and therefore can be resolved to a + # constant value during the initial stages of query execution, e.g. ``lower(concat('X', 'Y'))`` ) can also + # utilize the index. (The same applies to ``In`` comparisons below, too.) 
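Note on the parser.py hunk above: `visitColumnExprFunction` now keeps both parameter lists of a `fn(params)(args)` call on a single `ast.Call`, and the new `ColumnExprCall` alternative maps a call on an arbitrary expression to `ast.ExprCall`. A minimal sketch of the intended behaviour, assuming `parse_expr` is the module's usual entry point (it is not shown in this excerpt) and that node attributes match the visitor code above:

```python
from posthog.hogql import ast
from posthog.hogql.parser import parse_expr  # assumed entry point, not part of this diff

# fn(params)(args): both parameter lists land on one ast.Call node.
call = parse_expr("quantile(0.95)(duration)")
assert isinstance(call, ast.Call)
assert call.name == "quantile"
assert call.params is not None and len(call.params) == 1  # the (0.95) list
assert len(call.args) == 1                                # the (duration) list

# An empty first list becomes [] rather than None, per the LPAREN(1) check above.
assert parse_expr("foo()(bar)").params == []

# Calling the result of a non-identifier expression goes through the new
# ColumnExprCall rule and yields ast.ExprCall instead of ast.Call.
assert isinstance(parse_expr("arr[1](42)"), ast.ExprCall)
```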
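Similarly, the `PrintableMaterializedPropertyGroupItem` helper added above centralizes the `has(...) ? ... : null` rendering that was previously built inline. A small usage sketch; the column name and the `%(hogql_val_0)s` placeholder are illustrative stand-ins for what the printer actually supplies via `context.add_value`:

```python
from posthog.hogql.printer import PrintableMaterializedPropertyGroupItem  # assumed importable

item = PrintableMaterializedPropertyGroupItem(
    table="events",
    column="properties_group_custom",   # hypothetical property group column
    property_name="%(hogql_val_0)s",    # printer-added placeholder for the key
)
print(item.has_expr)    # has(events.properties_group_custom, %(hogql_val_0)s)
print(item.value_expr)  # events.properties_group_custom[%(hogql_val_0)s]
print(str(item))        # has(...) ? events.properties_group_custom[...] : null
```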
+ property_type: ast.PropertyType | None = None + constant_expr: ast.Constant | None = None + + # TODO: This doesn't resolve aliases for the constant operand, so this does not comprehensively cover all + # optimizable expressions, but that case seems uncommon enough to avoid for now. + if isinstance(node.right, ast.Constant): + left_type = resolve_field_type(node.left) + if isinstance(left_type, ast.PropertyType): + property_type = left_type + constant_expr = node.right + elif isinstance(node.left, ast.Constant): + right_type = resolve_field_type(node.right) + if isinstance(right_type, ast.PropertyType): + property_type = right_type + constant_expr = node.left + + # TODO: Chained properties could likely be supported here to at least use the keys index. + if property_type is None or len(property_type.chain) > 1: + return None + else: + assert constant_expr is not None # appease mypy - if we got this far, we should have a constant + + property_source = self.__get_materialized_property_source(property_type) + if not isinstance(property_source, PrintableMaterializedPropertyGroupItem): + return None + + if node.op == ast.CompareOperationOp.Eq: + if constant_expr.value is None: + # "IS NULL" can be interpreted as "does not exist in the map" -- this avoids unnecessarily reading + # the ``values`` subcolumn of the map. + return f"not({property_source.has_expr})" + + printed_expr = f"equals({property_source.value_expr}, {self.visit(constant_expr)})" + if constant_expr.value == "": + # If we're comparing to an empty string literal, we need to disambiguate this from the default value + # for the ``Map(String, String)`` type used for storing property group values by also ensuring that + # the property key is present in the map. If this is in a ``WHERE`` clause, this also ensures we can + # still use the data skipping index on keys, even though the values index cannot be used. + printed_expr = f"and({property_source.has_expr}, {printed_expr})" + + return printed_expr + + elif node.op == ast.CompareOperationOp.NotEq: + if constant_expr.value is None: + # "IS NOT NULL" can be interpreted as "does exist in the map" -- this avoids unnecessarily reading + # the ``values`` subcolumn of the map, and also allows us to use the data skipping index on keys. + return property_source.has_expr + + elif node.op in (ast.CompareOperationOp.In): + # ``IN`` is _not_ commutative, so we only need to check the left side operand (in contrast with above.) + left_type = resolve_field_type(node.left) + if not isinstance(left_type, ast.PropertyType): + return None + + # TODO: Chained properties could likely be supported here to at least use the keys index. + if left_type is None or len(left_type.chain) > 1: + return None + + property_source = self.__get_materialized_property_source(left_type) + if not isinstance(property_source, PrintableMaterializedPropertyGroupItem): + return None + + if isinstance(node.right, ast.Constant): + if node.right.value is None: + return "0" + elif node.right.value == "": + # If the RHS is the empty string, we need to disambiguate it from the default value for missing keys. + return f"and({property_source.has_expr}, equals({property_source.value_expr}, {self.visit(node.right)}))" + else: + return f"in({property_source.value_expr}, {self.visit(node.right)})" + elif isinstance(node.right, ast.Tuple): + # If any of the values on the RHS are the empty string, we need to disambiguate it from the default + # value for missing keys. 
NULLs should also be dropped, but everything else can be passed through as-is. + default_value_expr: ast.Constant | None = None + for expr in node.right.exprs[:]: + if not isinstance(expr, ast.Constant): + return None # only optimize constants for now, see above + elif expr.value is None: + node.right.exprs.remove(expr) + elif expr.value == "": + default_value_expr = expr + node.right.exprs.remove(expr) + if len(node.right.exprs) > 0: + # TODO: Check to see if it'd be faster to do equality comparison here instead? + printed_expr = f"in({property_source.value_expr}, {self.visit(node.right)})" + if default_value_expr is not None: + printed_expr = f"or({printed_expr}, and({property_source.has_expr}, equals({property_source.value_expr}, {self.visit(default_value_expr)})))" + elif default_value_expr is not None: + printed_expr = f"and({property_source.has_expr}, equals({property_source.value_expr}, {self.visit(default_value_expr)}))" + else: + printed_expr = "0" + return printed_expr + else: + # TODO: Alias types are not resolved here (similarly to equality operations above) so some expressions + # are not optimized that possibly could be if we took that additional step to determine whether or not + # they are references to Constant types. + return None + + return None # nothing to optimize + def visit_compare_operation(self, node: ast.CompareOperation): + # If either side of the operation is a property that is part of a property group, special optimizations may + # apply here to ensure that data skipping indexes can be used when possible. + if optimized_property_group_compare_operation := self.__get_optimized_property_group_compare_operation(node): + return optimized_property_group_compare_operation + in_join_constraint = any(isinstance(item, ast.JoinConstraint) for item in self.stack) left = self.visit(node.left) right = self.visit(node.right) @@ -758,7 +925,48 @@ def visit_field(self, node: ast.Field): else: raise ImpossibleASTError(f"Unknown Type, can not print {type(node.type).__name__}") + def __get_optimized_property_group_call(self, node: ast.Call) -> str | None: + """ + Returns a printed expression corresponding to the provided call, if the function is being applied to a property + group value and the function can be rewritten so that it can be eligible for use by the property group's map's + key bloom filter index, or can be optimized to avoid reading the property group's map ``values`` subcolumn. + """ + if self.context.modifiers.propertyGroupsMode != PropertyGroupsMode.OPTIMIZED: + return None + + # XXX: A lot of this is duplicated (sometimes just copy/pasted) from the null equality comparison logic -- it + # might make sense to make it so that ``isNull``/``isNotNull`` is rewritten to comparison expressions before + # this step, similar to how ``equals``/``notEquals`` are interpreted as their comparison operation counterparts. 
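Taken together, the branches above aim at rewrites like the following (key and column names are illustrative plain literals; the real printer parameterizes string values and renders the remaining tuple itself, so the exact output differs slightly). The `isNull()`/`isNotNull()` function forms handled further down get the same key-only treatment:

```python
# HogQL predicate -> intended ClickHouse fragment under PropertyGroupsMode.OPTIMIZED
EXPECTED_REWRITES = {
    # NULL checks only need the map keys, so the keys bloom filter index applies:
    "properties.key IS NULL":     "not(has(events.properties_group_custom, 'key'))",
    "properties.key IS NOT NULL": "has(events.properties_group_custom, 'key')",
    # Plain equality reads the value and can use the values index:
    "properties.key = 'v'":       "equals(events.properties_group_custom['key'], 'v')",
    # '' is the Map(String, String) default, so equality to '' also asserts key presence:
    "properties.key = ''": (
        "and(has(events.properties_group_custom, 'key'), "
        "equals(events.properties_group_custom['key'], ''))"
    ),
    # IN drops NULL members; an '' member is split out into the key-presence form above:
    "properties.key IN ('a', 'b', NULL)": (
        "in(events.properties_group_custom['key'], ('a', 'b'))"
    ),
}
```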
+ def resolve_field_type(expr: ast.Expr) -> ast.Type | None: + expr_type = expr.type + while isinstance(expr_type, ast.FieldAliasType): + expr_type = expr_type.type + return expr_type + + if node.name in ("isNull", "isNotNull"): + assert len(node.args) == 1, "expected unary call" + arg_type = resolve_field_type(node.args[0]) + # TODO: can probably optimize chained operations, but will need more thought + if isinstance(arg_type, ast.PropertyType) and len(arg_type.chain) == 1: + property_source = self.__get_materialized_property_source(arg_type) + if not isinstance(property_source, PrintableMaterializedPropertyGroupItem): + return None + + if node.name == "isNull": + return f"not({property_source.has_expr})" + elif node.name == "isNotNull": + return property_source.has_expr + else: + raise ValueError("unexpected node name") + + return None # nothing to optimize + def visit_call(self, node: ast.Call): + # If the argument(s) are part of a property group, special optimizations may apply here to ensure that data + # skipping indexes can be used when possible. + if optimized_property_group_call := self.__get_optimized_property_group_call(node): + return optimized_property_group_call + if node.name in HOGQL_COMPARISON_MAPPING: op = HOGQL_COMPARISON_MAPPING[node.name] if len(node.args) != 2: @@ -1034,9 +1242,15 @@ def visit_field_type(self, type: ast.FieldType): return field_sql - def visit_property_type(self, type: ast.PropertyType): - if type.joined_subquery is not None and type.joined_subquery_field_name is not None: - return f"{self._print_identifier(type.joined_subquery.alias)}.{self._print_identifier(type.joined_subquery_field_name)}" + def __get_materialized_property_source( + self, type: ast.PropertyType + ) -> PrintableMaterializedColumn | PrintableMaterializedPropertyGroupItem | None: + """ + Find a materialized property for the first part of the property chain. + """ + # TODO: It likely makes sense to make this independent of whether or not property groups are used. + if self.context.modifiers.materializationMode == "disabled": + return None field_type = type.field_type field = field_type.resolve_database_field(self.context) @@ -1046,79 +1260,77 @@ def visit_property_type(self, type: ast.PropertyType): while isinstance(table, ast.TableAliasType): table = table.table_type - args: list[str] = [] - - if self.context.modifiers.materializationMode != "disabled": - # find a materialized property for the first part of the chain - materialized_property_sql: Optional[str] = None - from_property_group = False - if isinstance(table, ast.TableType): - if self.dialect == "clickhouse": - table_name = table.table.to_printed_clickhouse(self.context) - else: - table_name = table.table.to_printed_hogql() - if field is None: - raise QueryError(f"Can't resolve field {field_type.name} on table {table_name}") - field_name = cast(Union[Literal["properties"], Literal["person_properties"]], field.name) - - materialized_column = self._get_materialized_column(table_name, type.chain[0], field_name) - if materialized_column: - property_sql = self._print_identifier(materialized_column) - property_sql = f"{self.visit(field_type.table_type)}.{property_sql}" - materialized_property_sql = property_sql - elif self.context.modifiers.propertyGroupsMode == PropertyGroupsMode.ENABLED: - property_name = str(type.chain[0]) - # For now, we're assuming that properties are in either no groups or one group, so just using the - # first group returned is fine. 
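Both optimization helpers above bail out unless the query runs with `PropertyGroupsMode.OPTIMIZED`. A rough end-to-end sketch of switching that mode on; the import paths and constructor signatures outside this diff are assumptions, and actually running it needs a configured PostHog environment:

```python
from posthog.hogql.context import HogQLContext
from posthog.hogql.parser import parse_select
from posthog.hogql.printer import print_ast
from posthog.schema import HogQLQueryModifiers, PropertyGroupsMode

# Opt the printer into the property-group rewrites shown above.
modifiers = HogQLQueryModifiers(propertyGroupsMode=PropertyGroupsMode.OPTIMIZED)
context = HogQLContext(team_id=1, enable_select_queries=True, modifiers=modifiers)

query = parse_select("SELECT event FROM events WHERE properties.key IS NOT NULL")
print(print_ast(query, context=context, dialect="clickhouse"))
# The WHERE clause should come out as a bare has(...) check on the property group
# column rather than a read of the map values subcolumn.
```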
If we start putting properties in multiple groups, this should be - # revisited to find the optimal set (i.e. smallest set) of groups to read from. - for property_group_column in property_groups.get_property_group_columns( - table_name, field_name, property_name - ): - printed_column = ( - f"{self.visit(field_type.table_type)}.{self._print_identifier(property_group_column)}" - ) - printed_property_name = self.context.add_value(property_name) - # If the key we're looking for doesn't exist in the map for this property group, an empty string - # (the default value for the `String` type) is returned. Since that is a valid property value, - # we need to check it here. - materialized_property_sql = f"has({printed_column}, {printed_property_name}) ? {printed_column}[{printed_property_name}] : null" - from_property_group = True - break - elif ( - self.context.within_non_hogql_query - and (isinstance(table, ast.SelectQueryAliasType) and table.alias == "events__pdi__person") - or (isinstance(table, ast.VirtualTableType) and table.field == "poe") + if isinstance(table, ast.TableType): + if self.dialect == "clickhouse": + table_name = table.table.to_printed_clickhouse(self.context) + else: + table_name = table.table.to_printed_hogql() + if field is None: + raise QueryError(f"Can't resolve field {field_type.name} on table {table_name}") + field_name = cast(Union[Literal["properties"], Literal["person_properties"]], field.name) + + materialized_column = self._get_materialized_column(table_name, type.chain[0], field_name) + if materialized_column: + return PrintableMaterializedColumn( + self.visit(field_type.table_type), + self._print_identifier(materialized_column), + ) + elif self.context.modifiers.propertyGroupsMode in ( + PropertyGroupsMode.ENABLED, + PropertyGroupsMode.OPTIMIZED, ): - # :KLUDGE: Legacy person properties handling. Only used within non-HogQL queries, such as insights. - if self.context.modifiers.personsOnEventsMode != PersonsOnEventsMode.DISABLED: - materialized_column = self._get_materialized_column( - "events", str(type.chain[0]), "person_properties" + property_name = str(type.chain[0]) + # For now, we're assuming that properties are in either no groups or one group, so just using the + # first group returned is fine. If we start putting properties in multiple groups, this should be + # revisited to find the optimal set (i.e. smallest set) of groups to read from. + for property_group_column in property_groups.get_property_group_columns( + table_name, field_name, property_name + ): + return PrintableMaterializedPropertyGroupItem( + self.visit(field_type.table_type), + self._print_identifier(property_group_column), + self.context.add_value(property_name), ) - else: - materialized_column = self._get_materialized_column("person", str(type.chain[0]), "properties") - if materialized_column: - materialized_property_sql = self._print_identifier(materialized_column) + elif ( + self.context.within_non_hogql_query + and (isinstance(table, ast.SelectQueryAliasType) and table.alias == "events__pdi__person") + or (isinstance(table, ast.VirtualTableType) and table.field == "poe") + ): + # :KLUDGE: Legacy person properties handling. Only used within non-HogQL queries, such as insights. 
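The has_expr/value_expr pair used by the comparison optimizations above comes from this property-group source. A rough sketch of what such a printable item looks like, with the field names assumed from how it is used in this hunk (the real PrintableMaterializedPropertyGroupItem definition is not shown here):

from dataclasses import dataclass

@dataclass
class PropertyGroupItemSketch:
    table: str            # e.g. "events"
    column: str           # e.g. "properties_group_custom"
    key_placeholder: str  # e.g. "%(hogql_val_0)s"

    @property
    def has_expr(self) -> str:
        # "does this key exist in the map at all?" -- eligible for the keys bloom filter index
        return f"has({self.table}.{self.column}, {self.key_placeholder})"

    @property
    def value_expr(self) -> str:
        # bare subscript: yields '' (the String default) when the key is missing
        return f"{self.table}.{self.column}[{self.key_placeholder}]"

    def __str__(self) -> str:
        # the unoptimized access pattern: only treat the value as non-null when the key exists
        return f"{self.has_expr} ? {self.value_expr} : null"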
+ if self.context.modifiers.personsOnEventsMode != PersonsOnEventsMode.DISABLED: + materialized_column = self._get_materialized_column("events", str(type.chain[0]), "person_properties") + else: + materialized_column = self._get_materialized_column("person", str(type.chain[0]), "properties") + if materialized_column: + return PrintableMaterializedColumn(None, self._print_identifier(materialized_column)) + + return None + + def visit_property_type(self, type: ast.PropertyType): + if type.joined_subquery is not None and type.joined_subquery_field_name is not None: + return f"{self._print_identifier(type.joined_subquery.alias)}.{self._print_identifier(type.joined_subquery_field_name)}" - if materialized_property_sql is not None: + materialized_property_source = self.__get_materialized_property_source(type) + if materialized_property_source is not None: + if isinstance(materialized_property_source, PrintableMaterializedColumn): # TODO: rematerialize all columns to properly support empty strings and "null" string values. - # (Property values that were retrieved from a property group correctly distinguish between these, so - # these checks are not necessary for those values.) - if not from_property_group: - if self.context.modifiers.materializationMode == MaterializationMode.LEGACY_NULL_AS_STRING: - materialized_property_sql = f"nullIf({materialized_property_sql}, '')" - else: # MaterializationMode AUTO or LEGACY_NULL_AS_NULL - materialized_property_sql = f"nullIf(nullIf({materialized_property_sql}, ''), 'null')" - - if len(type.chain) == 1: - return materialized_property_sql - else: - for name in type.chain[1:]: - args.append(self.context.add_value(name)) - return self._unsafe_json_extract_trim_quotes(materialized_property_sql, args) + if self.context.modifiers.materializationMode == MaterializationMode.LEGACY_NULL_AS_STRING: + materialized_property_sql = f"nullIf({materialized_property_source}, '')" + else: # MaterializationMode AUTO or LEGACY_NULL_AS_NULL + materialized_property_sql = f"nullIf(nullIf({materialized_property_source}, ''), 'null')" + else: + materialized_property_sql = str(materialized_property_source) - for name in type.chain: - args.append(self.context.add_value(name)) - return self._unsafe_json_extract_trim_quotes(self.visit(field_type), args) + if len(type.chain) == 1: + return materialized_property_sql + else: + return self._unsafe_json_extract_trim_quotes( + materialized_property_sql, [self.context.add_value(name) for name in type.chain[1:]] + ) + + return self._unsafe_json_extract_trim_quotes( + self.visit(type.field_type), [self.context.add_value(name) for name in type.chain] + ) def visit_sample_expr(self, node: ast.SampleExpr): sample_value = self.visit_ratio_expr(node.sample_value) diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index afceaf8f3edf8..bce1453e561f8 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -37,6 +37,7 @@ SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -78,7 +79,7 @@ def visit_call(self, node: ast.Call): self.visit(arg) -def _handle_bool_values(value: ValueT, field: ast.Field, property: Property, team: Team) -> ValueT | bool: +def _handle_bool_values(value: ValueT, expr: ast.Expr, property: Property, team: Team) -> ValueT | bool: if value != "true" and value != "false": return value if property.type == "person": @@ -95,7 +96,10 @@ def _handle_bool_values(value: ValueT, field: ast.Field, 
property: Property, tea group_type_index=property.group_type_index, ) elif property.type == "data_warehouse_person_property": - key = field.chain[-2] + if not isinstance(expr, ast.Field): + raise Exception(f"Requires a Field expression") + + key = expr.chain[-2] # TODO: pass id of table item being filtered on instead of searching through joins current_join: DataWarehouseJoin | None = ( @@ -156,54 +160,57 @@ def _handle_bool_values(value: ValueT, field: ast.Field, property: Property, tea return value -def _field_to_compare_op( - field: ast.Field, value: ValueT, operator: PropertyOperator, property: Property, is_json_field: bool, team: Team +def _expr_to_compare_op( + expr: ast.Expr, value: ValueT, operator: PropertyOperator, property: Property, is_json_field: bool, team: Team ) -> ast.Expr: if operator == PropertyOperator.IS_SET: return ast.CompareOperation( op=ast.CompareOperationOp.NotEq, - left=field, + left=expr, right=ast.Constant(value=None), ) elif operator == PropertyOperator.IS_NOT_SET: - return ast.Or( - exprs=[ - ast.CompareOperation( - op=ast.CompareOperationOp.Eq, - left=field, - right=ast.Constant(value=None), - ) - ] - + ( - [ - ast.Not( - expr=ast.Call( - name="JSONHas", - args=[ast.Field(chain=field.chain[:-1]), ast.Constant(value=property.key)], - ) + exprs: list[ast.Expr] = [ + ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=expr, + right=ast.Constant(value=None), + ) + ] + + if is_json_field: + if not isinstance(expr, ast.Field): + raise Exception(f"Requires a Field expression") + + field = ast.Field(chain=expr.chain[:-1]) + + exprs.append( + ast.Not( + expr=ast.Call( + name="JSONHas", + args=[field, ast.Constant(value=property.key)], ) - ] - if is_json_field - else [] + ) ) - ) + + return ast.Or(exprs=exprs) elif operator == PropertyOperator.ICONTAINS: return ast.CompareOperation( op=ast.CompareOperationOp.ILike, - left=field, + left=expr, right=ast.Constant(value=f"%{value}%"), ) elif operator == PropertyOperator.NOT_ICONTAINS: return ast.CompareOperation( op=ast.CompareOperationOp.NotILike, - left=field, + left=expr, right=ast.Constant(value=f"%{value}%"), ) elif operator == PropertyOperator.REGEX: return ast.Call( name="ifNull", args=[ - ast.Call(name="match", args=[ast.Call(name="toString", args=[field]), ast.Constant(value=value)]), + ast.Call(name="match", args=[ast.Call(name="toString", args=[expr]), ast.Constant(value=value)]), ast.Constant(value=0), ], ) @@ -214,9 +221,7 @@ def _field_to_compare_op( ast.Call( name="not", args=[ - ast.Call( - name="match", args=[ast.Call(name="toString", args=[field]), ast.Constant(value=value)] - ) + ast.Call(name="match", args=[ast.Call(name="toString", args=[expr]), ast.Constant(value=value)]) ], ), ast.Constant(value=1), @@ -225,49 +230,52 @@ def _field_to_compare_op( elif operator == PropertyOperator.EXACT or operator == PropertyOperator.IS_DATE_EXACT: return ast.CompareOperation( op=ast.CompareOperationOp.Eq, - left=field, - right=ast.Constant(value=_handle_bool_values(value, field, property, team)), + left=expr, + right=ast.Constant(value=_handle_bool_values(value, expr, property, team)), ) elif operator == PropertyOperator.IS_NOT: return ast.CompareOperation( op=ast.CompareOperationOp.NotEq, - left=field, - right=ast.Constant(value=_handle_bool_values(value, field, property, team)), + left=expr, + right=ast.Constant(value=_handle_bool_values(value, expr, property, team)), ) elif operator == PropertyOperator.LT or operator == PropertyOperator.IS_DATE_BEFORE: - return 
ast.CompareOperation(op=ast.CompareOperationOp.Lt, left=field, right=ast.Constant(value=value)) + return ast.CompareOperation(op=ast.CompareOperationOp.Lt, left=expr, right=ast.Constant(value=value)) elif operator == PropertyOperator.GT or operator == PropertyOperator.IS_DATE_AFTER: - return ast.CompareOperation(op=ast.CompareOperationOp.Gt, left=field, right=ast.Constant(value=value)) + return ast.CompareOperation(op=ast.CompareOperationOp.Gt, left=expr, right=ast.Constant(value=value)) elif operator == PropertyOperator.LTE: - return ast.CompareOperation(op=ast.CompareOperationOp.LtEq, left=field, right=ast.Constant(value=value)) + return ast.CompareOperation(op=ast.CompareOperationOp.LtEq, left=expr, right=ast.Constant(value=value)) elif operator == PropertyOperator.GTE: - return ast.CompareOperation(op=ast.CompareOperationOp.GtEq, left=field, right=ast.Constant(value=value)) + return ast.CompareOperation(op=ast.CompareOperationOp.GtEq, left=expr, right=ast.Constant(value=value)) else: raise NotImplementedError(f"PropertyOperator {operator} not implemented") def property_to_expr( - property: list - | dict - | PropertyGroup - | PropertyGroupFilter - | PropertyGroupFilterValue - | Property - | ast.Expr - | EventPropertyFilter - | PersonPropertyFilter - | ElementPropertyFilter - | SessionPropertyFilter - | CohortPropertyFilter - | RecordingPropertyFilter - | GroupPropertyFilter - | FeaturePropertyFilter - | HogQLPropertyFilter - | EmptyPropertyFilter - | DataWarehousePropertyFilter - | DataWarehousePersonPropertyFilter, + property: ( + list + | dict + | PropertyGroup + | PropertyGroupFilter + | PropertyGroupFilterValue + | Property + | ast.Expr + | EventPropertyFilter + | PersonPropertyFilter + | ElementPropertyFilter + | SessionPropertyFilter + | CohortPropertyFilter + | RecordingPropertyFilter + | LogEntryPropertyFilter + | GroupPropertyFilter + | FeaturePropertyFilter + | HogQLPropertyFilter + | EmptyPropertyFilter + | DataWarehousePropertyFilter + | DataWarehousePersonPropertyFilter + ), team: Team, - scope: Literal["event", "person", "session", "replay", "replay_entity", "replay_pdi"] = "event", + scope: Literal["event", "person", "session", "replay", "replay_entity"] = "event", ) -> ast.Expr: if isinstance(property, dict): try: @@ -337,6 +345,8 @@ def property_to_expr( or property.type == "data_warehouse" or property.type == "data_warehouse_person_property" or property.type == "session" + or property.type == "recording" + or property.type == "log_entry" ): if (scope == "person" and property.type != "person") or (scope == "session" and property.type != "session"): raise QueryError(f"The '{property.type}' property filter does not work in '{scope}' scope") @@ -358,7 +368,7 @@ def property_to_expr( raise QueryError("Data warehouse person property filter value must be a string") elif property.type == "group": chain = [f"group_{property.group_type_index}", "properties"] - elif property.type == "data_warehouse": + elif property.type in ["recording", "data_warehouse", "log_entry"]: chain = [] elif property.type == "session" and scope in ["event", "replay"]: chain = ["session"] @@ -368,6 +378,10 @@ def property_to_expr( chain = ["properties"] field = ast.Field(chain=[*chain, property.key]) + expr: ast.Expr = field + + if property.type == "recording" and property.key == "snapshot_source": + expr = ast.Call(name="argMinMerge", args=[field]) if isinstance(value, list): if len(value) == 0: @@ -398,8 +412,8 @@ def property_to_expr( return ast.And(exprs=exprs) return ast.Or(exprs=exprs) - return 
_field_to_compare_op( - field=field, + return _expr_to_compare_op( + expr=expr, value=value, operator=operator, team=team, @@ -449,8 +463,8 @@ def property_to_expr( return expr if property.key == "href": - return _field_to_compare_op( - field=ast.Field(chain=["elements_chain_href"]), + return _expr_to_compare_op( + expr=ast.Field(chain=["elements_chain_href"]), value=value, operator=operator, team=team, @@ -462,8 +476,8 @@ def property_to_expr( return parse_expr( "arrayExists(text -> {compare}, elements_chain_texts)", { - "compare": _field_to_compare_op( - field=ast.Field(chain=["text"]), + "compare": _expr_to_compare_op( + expr=ast.Field(chain=["text"]), value=value, operator=operator, team=team, diff --git a/posthog/hogql/resolver.py b/posthog/hogql/resolver.py index 332e81324871f..54b5f5f298756 100644 --- a/posthog/hogql/resolver.py +++ b/posthog/hogql/resolver.py @@ -112,7 +112,7 @@ def __init__( self.database = context.database self.cte_counter = 0 - def visit(self, node: ast.Expr | None) -> ast.Expr: + def visit(self, node: ast.AST | None): if isinstance(node, ast.Expr) and node.type is not None: raise ResolutionError( f"Type already resolved for {type(node).__name__} ({type(node.type).__name__}). Can't run again." @@ -518,6 +518,12 @@ def visit_call(self, node: ast.Call): ) return node + def visit_expr_call(self, node: ast.ExprCall): + raise QueryError("You can only call simple functions in HogQL, not expressions") + + def visit_block(self, node: ast.Block): + raise QueryError("You can not use blocks in HogQL") + def visit_lambda(self, node: ast.Lambda): """Visit each SELECT query or subquery.""" diff --git a/posthog/hogql/test/__snapshots__/test_query.ambr b/posthog/hogql/test/__snapshots__/test_query.ambr index 24ff3ec47cb6e..cab20c93492e5 100644 --- a/posthog/hogql/test/__snapshots__/test_query.ambr +++ b/posthog/hogql/test/__snapshots__/test_query.ambr @@ -105,10 +105,10 @@ SELECT sessions.session_id AS session_id, sessions.`$entry_current_url` AS `$entry_current_url` FROM ( - SELECT sessions.session_id AS session_id, nullIf(nullIf(argMinMerge(sessions.entry_url), %(hogql_val_0)s), %(hogql_val_1)s) AS `$entry_current_url`, min(toTimeZone(sessions.min_timestamp, %(hogql_val_2)s)) AS `$start_timestamp` - FROM sessions - WHERE and(equals(sessions.team_id, 420), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, %(hogql_val_3)s), toIntervalDay(3)), toDateTime64('2024-07-06 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, %(hogql_val_4)s), toIntervalDay(3)), toDateTime64('2024-07-04 00:00:00.000000', 6, 'UTC')), 0)) - GROUP BY sessions.session_id, sessions.session_id) AS sessions + SELECT toString(reinterpretAsUUID(bitOr(bitShiftLeft(raw_sessions.session_id_v7, 64), bitShiftRight(raw_sessions.session_id_v7, 64)))) AS session_id, nullIf(nullIf(argMinMerge(raw_sessions.entry_url), %(hogql_val_0)s), %(hogql_val_1)s) AS `$entry_current_url`, min(toTimeZone(raw_sessions.min_timestamp, %(hogql_val_2)s)) AS `$start_timestamp`, raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE equals(raw_sessions.team_id, 420) + GROUP BY raw_sessions.session_id_v7, raw_sessions.session_id_v7) AS sessions WHERE and(ifNull(less(sessions.`$start_timestamp`, toDateTime64('2024-07-06 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(sessions.`$start_timestamp`, toDateTime64('2024-07-04 00:00:00.000000', 6, 'UTC')), 0)) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, 
format_csv_allow_double_quotes=0, max_ast_elements=4000000, max_expanded_ast_elements=4000000, max_bytes_before_external_group_by=0 @@ -136,10 +136,10 @@ SELECT sessions.session_id AS session_id, sessions.`$entry_current_url` AS `$entry_current_url` FROM ( - SELECT sessions.session_id AS session_id, nullIf(nullIf(argMinMerge(sessions.entry_url), %(hogql_val_0)s), %(hogql_val_1)s) AS `$entry_current_url` - FROM sessions - WHERE equals(sessions.team_id, 420) - GROUP BY sessions.session_id, sessions.session_id) AS sessions + SELECT toString(reinterpretAsUUID(bitOr(bitShiftLeft(raw_sessions.session_id_v7, 64), bitShiftRight(raw_sessions.session_id_v7, 64)))) AS session_id, nullIf(nullIf(argMinMerge(raw_sessions.entry_url), %(hogql_val_0)s), %(hogql_val_1)s) AS `$entry_current_url`, raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE equals(raw_sessions.team_id, 420) + GROUP BY raw_sessions.session_id_v7, raw_sessions.session_id_v7) AS sessions WHERE and(ifNull(equals(sessions.`$entry_current_url`, %(hogql_val_2)s), 0), 1) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, max_ast_elements=4000000, max_expanded_ast_elements=4000000, max_bytes_before_external_group_by=0 diff --git a/posthog/hogql/test/_test_parser.py b/posthog/hogql/test/_test_parser.py index b1140be100b5d..280ee65ecf3cf 100644 --- a/posthog/hogql/test/_test_parser.py +++ b/posthog/hogql/test/_test_parser.py @@ -199,6 +199,50 @@ def test_tuples(self): self.assertEqual(self._expr("(1)"), ast.Constant(value=1)) def test_lambdas(self): + self.assertEqual( + self._expr("(x, y) -> x * y"), + ast.Lambda( + args=["x", "y"], + expr=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Mult, + left=ast.Field(chain=["x"]), + right=ast.Field(chain=["y"]), + ), + ), + ) + self.assertEqual( + self._expr("x, y -> x * y"), + ast.Lambda( + args=["x", "y"], + expr=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Mult, + left=ast.Field(chain=["x"]), + right=ast.Field(chain=["y"]), + ), + ), + ) + self.assertEqual( + self._expr("(x) -> x * y"), + ast.Lambda( + args=["x"], + expr=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Mult, + left=ast.Field(chain=["x"]), + right=ast.Field(chain=["y"]), + ), + ), + ) + self.assertEqual( + self._expr("x -> x * y"), + ast.Lambda( + args=["x"], + expr=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Mult, + left=ast.Field(chain=["x"]), + right=ast.Field(chain=["y"]), + ), + ), + ) self.assertEqual( self._expr("arrayMap(x -> x * 2)"), ast.Call( @@ -248,6 +292,54 @@ def test_lambdas(self): ), ) + def test_lambda_blocks(self): + self.assertEqual( + self._expr("(x, y) -> { print('hello'); return x * y }"), + ast.Lambda( + args=["x", "y"], + expr=ast.Block( + declarations=[ + ast.ExprStatement(expr=ast.Call(name="print", args=[ast.Constant(value="hello")])), + ast.ReturnStatement( + expr=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Mult, + left=ast.Field(chain=["x"]), + right=ast.Field(chain=["y"]), + ) + ), + ] + ), + ), + ) + + def test_call_expr(self): + self.assertEqual( + self._expr("asd.asd(123)"), + ast.ExprCall( + expr=ast.Field(chain=["asd", "asd"]), + args=[ast.Constant(value=123)], + ), + ) + self.assertEqual( + self._expr("asd['asd'](123)"), + ast.ExprCall( + expr=ast.ArrayAccess(array=ast.Field(chain=["asd"]), property=ast.Constant(value="asd")), + args=[ast.Constant(value=123)], + ), + ) + self.assertEqual( + self._expr("(x -> x * 2)(3)"), + ast.ExprCall( + expr=ast.Lambda( + 
args=["x"], + expr=ast.ArithmeticOperation( + op=ast.ArithmeticOperationOp.Mult, left=ast.Field(chain=["x"]), right=ast.Constant(value=2) + ), + ), + args=[ast.Constant(value=3)], + ), + ) + def test_strings(self): self.assertEqual(self._expr("'null'"), ast.Constant(value="null")) self.assertEqual(self._expr("'n''ull'"), ast.Constant(value="n'ull")) diff --git a/posthog/hogql/test/test_bytecode.py b/posthog/hogql/test/test_bytecode.py index 29b2c4e5620b4..967066b3312f1 100644 --- a/posthog/hogql/test/test_bytecode.py +++ b/posthog/hogql/test/test_bytecode.py @@ -1,30 +1,31 @@ import pytest from posthog.hogql.bytecode import to_bytecode, execute_hog -from hogvm.python.operation import Operation as op, HOGQL_BYTECODE_IDENTIFIER as _H +from hogvm.python.operation import Operation as op, HOGQL_BYTECODE_IDENTIFIER as _H, HOGQL_BYTECODE_VERSION from posthog.hogql.errors import NotImplementedError, QueryError from posthog.test.base import BaseTest class TestBytecode(BaseTest): def test_bytecode_create(self): - self.assertEqual(to_bytecode("1 + 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.PLUS]) - self.assertEqual(to_bytecode("1 and 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.AND, 2]) - self.assertEqual(to_bytecode("1 or 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.OR, 2]) + self.assertEqual(to_bytecode("1 + 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.PLUS]) + self.assertEqual(to_bytecode("1 and 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 1, op.INTEGER, 2, op.AND, 2]) + self.assertEqual(to_bytecode("1 or 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 1, op.INTEGER, 2, op.OR, 2]) self.assertEqual( to_bytecode("1 or (2 and 1) or 2"), [ _H, - op.INTEGER, - 2, + HOGQL_BYTECODE_VERSION, op.INTEGER, 1, op.INTEGER, 2, + op.INTEGER, + 1, op.AND, 2, op.INTEGER, - 1, + 2, op.OR, 3, ], @@ -33,39 +34,41 @@ def test_bytecode_create(self): to_bytecode("(1 or 2) and (1 or 2)"), [ _H, - op.INTEGER, - 2, + HOGQL_BYTECODE_VERSION, op.INTEGER, 1, - op.OR, - 2, op.INTEGER, 2, + op.OR, + 2, op.INTEGER, 1, + op.INTEGER, + 2, op.OR, 2, op.AND, 2, ], ) - self.assertEqual(to_bytecode("not true"), [_H, op.TRUE, op.NOT]) - self.assertEqual(to_bytecode("true"), [_H, op.TRUE]) - self.assertEqual(to_bytecode("false"), [_H, op.FALSE]) - self.assertEqual(to_bytecode("null"), [_H, op.NULL]) - self.assertEqual(to_bytecode("3.14"), [_H, op.FLOAT, 3.14]) + self.assertEqual(to_bytecode("not true"), [_H, HOGQL_BYTECODE_VERSION, op.TRUE, op.NOT]) + self.assertEqual(to_bytecode("true"), [_H, HOGQL_BYTECODE_VERSION, op.TRUE]) + self.assertEqual(to_bytecode("false"), [_H, HOGQL_BYTECODE_VERSION, op.FALSE]) + self.assertEqual(to_bytecode("null"), [_H, HOGQL_BYTECODE_VERSION, op.NULL]) + self.assertEqual(to_bytecode("3.14"), [_H, HOGQL_BYTECODE_VERSION, op.FLOAT, 3.14]) self.assertEqual( to_bytecode("properties.bla"), - [_H, op.STRING, "bla", op.STRING, "properties", op.GET_GLOBAL, 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "bla", op.STRING, "properties", op.GET_GLOBAL, 2], ) self.assertEqual( to_bytecode("concat('arg', 'another')"), - [_H, op.STRING, "another", op.STRING, "arg", op.CALL, "concat", 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "arg", op.STRING, "another", op.CALL_GLOBAL, "concat", 2], ) self.assertEqual( to_bytecode("ifNull(properties.email, false)"), [ _H, + HOGQL_BYTECODE_VERSION, op.STRING, "email", op.STRING, @@ -78,91 +81,111 @@ def test_bytecode_create(self): op.FALSE, ], ) - self.assertEqual(to_bytecode("1 = 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.EQ]) - self.assertEqual(to_bytecode("1 == 
2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.EQ]) - self.assertEqual(to_bytecode("1 != 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_EQ]) - self.assertEqual(to_bytecode("1 < 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.LT]) - self.assertEqual(to_bytecode("1 <= 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.LT_EQ]) - self.assertEqual(to_bytecode("1 > 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.GT]) - self.assertEqual(to_bytecode("1 >= 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.GT_EQ]) - self.assertEqual(to_bytecode("1 like 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.LIKE]) - self.assertEqual(to_bytecode("1 ilike 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.ILIKE]) - self.assertEqual(to_bytecode("1 not like 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_LIKE]) + self.assertEqual(to_bytecode("1 = 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.EQ]) + self.assertEqual(to_bytecode("1 == 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.EQ]) + self.assertEqual(to_bytecode("1 != 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.NOT_EQ]) + self.assertEqual(to_bytecode("1 < 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.LT]) + self.assertEqual(to_bytecode("1 <= 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.LT_EQ]) + self.assertEqual(to_bytecode("1 > 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.GT]) + self.assertEqual(to_bytecode("1 >= 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.GT_EQ]) + self.assertEqual(to_bytecode("1 like 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.LIKE]) + self.assertEqual(to_bytecode("1 ilike 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.ILIKE]) + self.assertEqual( + to_bytecode("1 not like 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.NOT_LIKE] + ) self.assertEqual( to_bytecode("1 not ilike 2"), - [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_ILIKE], + [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.NOT_ILIKE], + ) + self.assertEqual(to_bytecode("1 in 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.IN]) + self.assertEqual( + to_bytecode("1 not in 2"), [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 2, op.INTEGER, 1, op.NOT_IN] ) - self.assertEqual(to_bytecode("1 in 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.IN]) - self.assertEqual(to_bytecode("1 not in 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_IN]) self.assertEqual( to_bytecode("'string' ~ 'regex'"), - [_H, op.STRING, "regex", op.STRING, "string", op.REGEX], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "regex", op.STRING, "string", op.REGEX], ) self.assertEqual( to_bytecode("'string' =~ 'regex'"), - [_H, op.STRING, "regex", op.STRING, "string", op.REGEX], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "regex", op.STRING, "string", op.REGEX], ) self.assertEqual( to_bytecode("'string' !~ 'regex'"), - [_H, op.STRING, "regex", op.STRING, "string", op.NOT_REGEX], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "regex", op.STRING, "string", op.NOT_REGEX], ) self.assertEqual( to_bytecode("'string' ~* 'regex'"), - [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "regex", op.STRING, "string", op.IREGEX], ) self.assertEqual( to_bytecode("'string' =~* 'regex'"), - [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "regex", op.STRING, "string", op.IREGEX], ) self.assertEqual( to_bytecode("'string' !~* 'regex'"), - [_H, op.STRING, "regex", 
op.STRING, "string", op.NOT_IREGEX], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "regex", op.STRING, "string", op.NOT_IREGEX], ) self.assertEqual( to_bytecode("match('test', 'e.*')"), - [_H, op.STRING, "e.*", op.STRING, "test", op.CALL, "match", 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "test", op.STRING, "e.*", op.CALL_GLOBAL, "match", 2], ) self.assertEqual( to_bytecode("match('test', '^e.*')"), - [_H, op.STRING, "^e.*", op.STRING, "test", op.CALL, "match", 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "test", op.STRING, "^e.*", op.CALL_GLOBAL, "match", 2], ) self.assertEqual( to_bytecode("match('test', 'x.*')"), - [_H, op.STRING, "x.*", op.STRING, "test", op.CALL, "match", 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "test", op.STRING, "x.*", op.CALL_GLOBAL, "match", 2], ) - self.assertEqual(to_bytecode("not('test')"), [_H, op.STRING, "test", op.NOT]) - self.assertEqual(to_bytecode("not 'test'"), [_H, op.STRING, "test", op.NOT]) + self.assertEqual(to_bytecode("not('test')"), [_H, HOGQL_BYTECODE_VERSION, op.STRING, "test", op.NOT]) + self.assertEqual(to_bytecode("not 'test'"), [_H, HOGQL_BYTECODE_VERSION, op.STRING, "test", op.NOT]) self.assertEqual( to_bytecode("or('test', 'test2')"), - [_H, op.STRING, "test2", op.STRING, "test", op.OR, 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "test", op.STRING, "test2", op.OR, 2], ) self.assertEqual( to_bytecode("and('test', 'test2')"), - [_H, op.STRING, "test2", op.STRING, "test", op.AND, 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "test", op.STRING, "test2", op.AND, 2], ) @pytest.mark.skip(reason="C++ parsing is not working for these cases yet.") def test_bytecode_objects(self): self.assertEqual( to_bytecode("[1, 2, 3]"), - [_H, op.INTEGER, 1, op.INTEGER, 2, op.INTEGER, 3, op.ARRAY, 3], + [_H, HOGQL_BYTECODE_VERSION, op.INTEGER, 1, op.INTEGER, 2, op.INTEGER, 3, op.ARRAY, 3], ) self.assertEqual( to_bytecode("[1, 2, 3][1]"), - [_H, op.INTEGER, 1, op.INTEGER, 2, op.INTEGER, 3, op.ARRAY, 3, op.INTEGER, 1, op.GET_PROPERTY, 1], + [ + _H, + HOGQL_BYTECODE_VERSION, + op.INTEGER, + 1, + op.INTEGER, + 2, + op.INTEGER, + 3, + op.ARRAY, + 3, + op.INTEGER, + 1, + op.GET_PROPERTY, + 1, + ], ) self.assertEqual( to_bytecode("{'a': 'b'}"), - [_H, op.STRING, "a", op.STRING, "b", op.DICT, 1], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "a", op.STRING, "b", op.DICT, 1], ) self.assertEqual( to_bytecode("{'a': 'b', 'c': 'd'}"), - [_H, op.STRING, "a", op.STRING, "b", op.STRING, "c", op.STRING, "d", op.DICT, 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "a", op.STRING, "b", op.STRING, "c", op.STRING, "d", op.DICT, 2], ) self.assertEqual( to_bytecode("{'a': 'b', 'c': {'a': 'b'}}"), [ _H, + HOGQL_BYTECODE_VERSION, op.STRING, "a", op.STRING, @@ -181,17 +204,17 @@ def test_bytecode_objects(self): ) self.assertEqual( to_bytecode("['a', 'b']"), - [_H, op.STRING, "a", op.STRING, "b", op.ARRAY, 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "a", op.STRING, "b", op.ARRAY, 2], ) self.assertEqual( to_bytecode("('a', 'b')"), - [_H, op.STRING, "a", op.STRING, "b", op.TUPLE, 2], + [_H, HOGQL_BYTECODE_VERSION, op.STRING, "a", op.STRING, "b", op.TUPLE, 2], ) def test_bytecode_create_not_implemented_error(self): with self.assertRaises(NotImplementedError) as e: to_bytecode("(select 1)") - self.assertEqual(str(e.exception), "BytecodeBuilder has no method visit_select_query") + self.assertEqual(str(e.exception), "BytecodeCompiler has no method visit_select_query") def test_bytecode_create_query_error(self): with self.assertRaises(QueryError) as e: diff --git 
a/posthog/hogql/test/test_metadata.py b/posthog/hogql/test/test_metadata.py index 1c0eb3ad20377..e19778e6f3f20 100644 --- a/posthog/hogql/test/test_metadata.py +++ b/posthog/hogql/test/test_metadata.py @@ -2,7 +2,7 @@ from posthog.hogql.metadata import get_hogql_metadata from posthog.models import PropertyDefinition, Cohort -from posthog.schema import HogQLMetadata, HogQLMetadataResponse, HogQLQuery +from posthog.schema import HogQLMetadata, HogQLMetadataResponse, HogQLQuery, HogLanguage from posthog.test.base import APIBaseTest, ClickhouseTestMixin from django.test import override_settings @@ -14,7 +14,7 @@ def _expr(self, query: str, table: str = "events", debug=True) -> HogQLMetadataR return get_hogql_metadata( query=HogQLMetadata( kind="HogQLMetadata", - language="hogQLExpr", + language=HogLanguage.HOG_QL_EXPR, query=query, sourceQuery=HogQLQuery(query=f"select * from {table}"), response=None, @@ -25,19 +25,21 @@ def _expr(self, query: str, table: str = "events", debug=True) -> HogQLMetadataR def _select(self, query: str) -> HogQLMetadataResponse: return get_hogql_metadata( - query=HogQLMetadata(kind="HogQLMetadata", language="hogQL", query=query, response=None), + query=HogQLMetadata(kind="HogQLMetadata", language=HogLanguage.HOG_QL, query=query, response=None), team=self.team, ) def _program(self, query: str, globals: Optional[dict] = None) -> HogQLMetadataResponse: return get_hogql_metadata( - query=HogQLMetadata(kind="HogQLMetadata", language="hog", query=query, globals=globals, response=None), + query=HogQLMetadata( + kind="HogQLMetadata", language=HogLanguage.HOG, query=query, globals=globals, response=None + ), team=self.team, ) def _template(self, query: str) -> HogQLMetadataResponse: return get_hogql_metadata( - query=HogQLMetadata(kind="HogQLMetadata", language="hogTemplate", query=query, response=None), + query=HogQLMetadata(kind="HogQLMetadata", language=HogLanguage.HOG_TEMPLATE, query=query, response=None), team=self.team, ) diff --git a/posthog/hogql/test/test_printer.py b/posthog/hogql/test/test_printer.py index 83efcb2aa2129..ee6bc9d084e3d 100644 --- a/posthog/hogql/test/test_printer.py +++ b/posthog/hogql/test/test_printer.py @@ -1,8 +1,11 @@ -from typing import Literal, Optional, cast +import json +from typing import Any, Literal, Optional, cast +from collections.abc import Mapping import pytest from django.test import override_settings +from posthog.clickhouse.client.execute import sync_execute from posthog.hogql import ast from posthog.hogql.constants import MAX_SELECT_RETURNED_ROWS, HogQLQuerySettings, HogQLGlobalSettings from posthog.hogql.context import HogQLContext @@ -20,7 +23,7 @@ PersonsOnEventsMode, PropertyGroupsMode, ) -from posthog.test.base import BaseTest, cleanup_materialized_columns +from posthog.test.base import BaseTest, _create_event, cleanup_materialized_columns class TestPrinter(BaseTest): @@ -358,6 +361,245 @@ def test_property_groups(self): "nullIf(nullIf(events.mat_foo, ''), 'null')", ) + def _test_property_group_comparison( + self, + input_expression: str, + expected_optimized_query: str | None, + expected_context_values: Mapping[str, Any] | None = None, + expected_skip_indexes_used: set[str] | None = None, + ) -> None: + def build_context(property_groups_mode: PropertyGroupsMode) -> HogQLContext: + return HogQLContext( + team_id=self.team.pk, + modifiers=HogQLQueryModifiers( + materializationMode=MaterializationMode.AUTO, + propertyGroupsMode=property_groups_mode, + ), + ) + + context = build_context(PropertyGroupsMode.OPTIMIZED) + 
printed_expr = self._expr(input_expression, context) + if expected_optimized_query is not None: + self.assertEqual(printed_expr, expected_optimized_query) + else: + unoptimized_context = build_context(PropertyGroupsMode.ENABLED) + unoptimized_expr = self._expr(input_expression, unoptimized_context) + # XXX: The placeholders used in the printed expression can vary between the direct and optimized variants, + # so we string format the context values back into the expression template. This isn't necessarily going to + # yield a valid ClickHouse expression, but it should generally be good enough to ensure the two expressions + # are the same. + self.assertEqual(printed_expr % context.values, unoptimized_expr % unoptimized_context.values) + + if expected_context_values is not None: + self.assertDictContainsSubset(expected_context_values, context.values) + + if expected_skip_indexes_used: + # The table needs some data to be able get a `EXPLAIN` result that includes index information -- otherwise + # the query is optimized to read from `NullSource` which doesn't do us much good here... + for _ in range(10): + _create_event(team=self.team, distinct_id="distinct_id", event="event") + + def _find_node(node, condition): + """Find the first node in a query plan meeting a given condition (using depth-first search.)""" + if condition(node): + return node + else: + for child in node.get("Plans", []): + result = _find_node(child, condition) + if result is not None: + return result + + [[raw_explain_result]] = sync_execute( + f"EXPLAIN indexes = 1, json = 1 SELECT count() FROM events WHERE {printed_expr}", + context.values, + ) + read_from_merge_tree_step = _find_node( + json.loads(raw_explain_result)[0]["Plan"], + condition=lambda node: node["Node Type"] == "ReadFromMergeTree", + ) + self.assertTrue( + expected_skip_indexes_used.issubset( + {index["Name"] for index in read_from_merge_tree_step.get("Indexes", []) if index["Type"] == "Skip"} + ), + ) + + def test_property_groups_optimized_basic_equality_comparisons(self) -> None: + # Comparing against a (non-empty) string value lets us avoid checking if the key exists or not, and lets us use + # the bloom filter indices on both keys and values to optimize the comparison operation. + self._test_property_group_comparison( + "properties.key = 'value'", + "equals(events.properties_group_custom[%(hogql_val_0)s], %(hogql_val_1)s)", + {"hogql_val_0": "key", "hogql_val_1": "value"}, + expected_skip_indexes_used={"properties_group_custom_keys_bf", "properties_group_custom_values_bf"}, + ) + self._test_property_group_comparison( + "'value' = properties.key", + "equals(events.properties_group_custom[%(hogql_val_0)s], %(hogql_val_1)s)", + {"hogql_val_0": "key", "hogql_val_1": "value"}, + expected_skip_indexes_used={"properties_group_custom_keys_bf", "properties_group_custom_values_bf"}, + ) + self._test_property_group_comparison( + "equals(properties.key, 'value')", + "equals(events.properties_group_custom[%(hogql_val_0)s], %(hogql_val_1)s)", + {"hogql_val_0": "key", "hogql_val_1": "value"}, + expected_skip_indexes_used={"properties_group_custom_keys_bf", "properties_group_custom_values_bf"}, + ) + + # TODO: We'll want to eventually support this type of expression where the right hand side is a non-``Nullable`` + # value, since this would allow expressions that only reference constant values to also use the appropriate + # index, but for right now we only want to optimize comparisons to constant values directly for simplicity. 
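The skip-index assertion in the helper above boils down to a depth-first search of the EXPLAIN plan for the ReadFromMergeTree step, followed by a filter on its Skip indexes. A self-contained sketch of that inspection against a hand-written plan (the plan dict here is illustrative only):

def find_node(node, condition):
    """Depth-first search for the first plan node meeting the condition."""
    if condition(node):
        return node
    for child in node.get("Plans", []):
        found = find_node(child, condition)
        if found is not None:
            return found
    return None

plan = {
    "Node Type": "Expression",
    "Plans": [
        {
            "Node Type": "ReadFromMergeTree",
            "Indexes": [
                {"Type": "MinMax", "Name": "minmax"},
                {"Type": "Skip", "Name": "properties_group_custom_keys_bf"},
                {"Type": "Skip", "Name": "properties_group_custom_values_bf"},
            ],
        }
    ],
}

read_step = find_node(plan, lambda node: node["Node Type"] == "ReadFromMergeTree")
skip_indexes_used = {index["Name"] for index in read_step.get("Indexes", []) if index["Type"] == "Skip"}
assert {"properties_group_custom_keys_bf", "properties_group_custom_values_bf"} <= skip_indexes_used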
+ self._test_property_group_comparison("properties.key = lower('value')", None) + + # The opposite case as above: ``Nullable`` values should _not_ be optimized (because we don't know which + # optimization to apply). + self._test_property_group_comparison("properties.key = nullIf('a', 'a')", None) + + # ... unless we can distinguish ``Nullable(Nothing)`` from ``Nullable(*)`` -- this _could_ be safely optimized. + self._test_property_group_comparison("properties.key = lower(NULL)", None) + + def test_property_groups_optimized_empty_string_equality_comparisons(self) -> None: + # Keys that don't exist in a map return default values for the type -- in our case empty strings -- so we need + # to check whether or not the key exists in the map *and* compare the value in the map is the empty string or + # not. We can still utilize the bloom filter index on keys, but the empty string isn't stored in the bloom + # filter so it won't be used here. + self._test_property_group_comparison( + "properties.key = ''", + "and(has(events.properties_group_custom, %(hogql_val_0)s), equals(events.properties_group_custom[%(hogql_val_0)s], %(hogql_val_1)s))", + {"hogql_val_0": "key", "hogql_val_1": ""}, + expected_skip_indexes_used={"properties_group_custom_keys_bf"}, + ) + self._test_property_group_comparison( + "equals(properties.key, '')", + "and(has(events.properties_group_custom, %(hogql_val_0)s), equals(events.properties_group_custom[%(hogql_val_0)s], %(hogql_val_1)s))", + {"hogql_val_0": "key", "hogql_val_1": ""}, + expected_skip_indexes_used={"properties_group_custom_keys_bf"}, + ) + + def test_property_groups_optimized_null_comparisons(self) -> None: + # NOT NULL comparisons should check to see if the key exists within the map (and should use the bloom filter to + # optimize the check), but do not need to load the values subcolumn. + self._test_property_group_comparison( + "properties.key is not null", + "has(events.properties_group_custom, %(hogql_val_0)s)", + {"hogql_val_0": "key"}, + expected_skip_indexes_used={"properties_group_custom_keys_bf"}, + ) + self._test_property_group_comparison( + "properties.key != null", + "has(events.properties_group_custom, %(hogql_val_0)s)", + {"hogql_val_0": "key"}, + expected_skip_indexes_used={"properties_group_custom_keys_bf"}, + ) + self._test_property_group_comparison( + "isNotNull(properties.key)", + "has(events.properties_group_custom, %(hogql_val_0)s)", + {"hogql_val_0": "key"}, + expected_skip_indexes_used={"properties_group_custom_keys_bf"}, + ) + + # NULL comparisons don't really benefit from the bloom filter index like NOT NULL comparisons do, but like + # above, only need to check the keys subcolumn and not the values subcolumn. + self._test_property_group_comparison( + "properties.key is null", + "not(has(events.properties_group_custom, %(hogql_val_0)s))", + {"hogql_val_0": "key"}, + ) + self._test_property_group_comparison( + "properties.key = null", + "not(has(events.properties_group_custom, %(hogql_val_0)s))", + {"hogql_val_0": "key"}, + ) + self._test_property_group_comparison( + "isNull(properties.key)", + "not(has(events.properties_group_custom, %(hogql_val_0)s))", + {"hogql_val_0": "key"}, + ) + + def test_property_groups_optimized_in_comparisons(self) -> None: + # The IN operator works much like equality when the right hand side of the expression is all constants. Like + # equality, it also needs to handle the empty string special case. 
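The empty-string special casing in these tests follows directly from the Map(String, String) default behaviour described in the comments: subscripting a missing key returns '', the same value a genuinely empty property would have. A plain-Python analogy of why the has() guard is required:

properties_group_custom = {"present_key": ""}  # a property that exists but is empty

def map_subscript(mapping, key):
    # ClickHouse map[key] semantics: a missing key yields the String default ''
    return mapping.get(key, "")

assert map_subscript(properties_group_custom, "present_key") == ""
assert map_subscript(properties_group_custom, "missing_key") == ""
# The two cases are indistinguishable from the value alone, hence and(has(map, key), equals(map[key], '')).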
+ self._test_property_group_comparison( + "properties.key IN ('a', 'b')", + "in(events.properties_group_custom[%(hogql_val_0)s], tuple(%(hogql_val_1)s, %(hogql_val_2)s))", + {"hogql_val_0": "key", "hogql_val_1": "a", "hogql_val_2": "b"}, + expected_skip_indexes_used={"properties_group_custom_keys_bf", "properties_group_custom_values_bf"}, + ) + self._test_property_group_comparison( + "properties.key IN 'a'", # strange, but syntactically valid + "in(events.properties_group_custom[%(hogql_val_0)s], %(hogql_val_1)s)", + {"hogql_val_0": "key", "hogql_val_1": "a"}, + expected_skip_indexes_used={"properties_group_custom_keys_bf", "properties_group_custom_values_bf"}, + ) + self._test_property_group_comparison( + "properties.key IN ('a', 'b', '')", + ( + "or(" + "in(events.properties_group_custom[%(hogql_val_0)s], tuple(%(hogql_val_1)s, %(hogql_val_2)s)), " + "and(has(events.properties_group_custom, %(hogql_val_0)s), equals(events.properties_group_custom[%(hogql_val_0)s], %(hogql_val_3)s))" + ")" + ), + {"hogql_val_0": "key", "hogql_val_1": "a", "hogql_val_2": "b", "hogql_val_3": ""}, + expected_skip_indexes_used={"properties_group_custom_keys_bf"}, + ) + self._test_property_group_comparison( + "properties.key IN ''", # strange, but syntactically valid + "and(has(events.properties_group_custom, %(hogql_val_0)s), equals(events.properties_group_custom[%(hogql_val_0)s], %(hogql_val_1)s))", + {"hogql_val_0": "key", "hogql_val_1": ""}, + expected_skip_indexes_used={"properties_group_custom_keys_bf"}, + ) + + # NULL values are never equal. While this differs from the behavior of the equality operator above, it is + # consistent with how ClickHouse treats these values: + # https://clickhouse.com/docs/en/sql-reference/operators/in#null-processing + self._test_property_group_comparison("properties.key in NULL", "0") + self._test_property_group_comparison("properties.key in (NULL)", "0") + self._test_property_group_comparison("properties.key in (NULL, NULL, NULL)", "0") + self._test_property_group_comparison( + "properties.key IN ('a', 'b', NULL)", + "in(events.properties_group_custom[%(hogql_val_0)s], tuple(%(hogql_val_1)s, %(hogql_val_2)s))", + {"hogql_val_0": "key", "hogql_val_1": "a", "hogql_val_2": "b"}, + expected_skip_indexes_used={"properties_group_custom_keys_bf", "properties_group_custom_values_bf"}, + ) + self._test_property_group_comparison( + "properties.key IN ('', NULL)", + "and(has(events.properties_group_custom, %(hogql_val_0)s), equals(events.properties_group_custom[%(hogql_val_0)s], %(hogql_val_1)s))", + {"hogql_val_0": "key", "hogql_val_1": ""}, + expected_skip_indexes_used={"properties_group_custom_keys_bf"}, + ) + + # Only direct constant comparison is supported for now -- see above. + self._test_property_group_comparison("properties.key in lower('value')", None) + self._test_property_group_comparison("properties.key in (lower('a'), lower('b'))", None) + + def test_property_groups_select_with_aliases(self): + def build_context(property_groups_mode: PropertyGroupsMode) -> HogQLContext: + return HogQLContext( + team_id=self.team.pk, + enable_select_queries=True, + modifiers=HogQLQueryModifiers( + materializationMode=MaterializationMode.AUTO, + propertyGroupsMode=property_groups_mode, + ), + ) + + parsed = parse_select("SELECT properties.file_type AS ft FROM events WHERE ft = 'image/svg'") + printed = print_ast(parsed, build_context(PropertyGroupsMode.OPTIMIZED), dialect="clickhouse") + assert printed == ( + "SELECT has(events.properties_group_custom, %(hogql_val_0)s) ? 
events.properties_group_custom[%(hogql_val_0)s] : null AS ft " + "FROM events " + f"WHERE and(equals(events.team_id, {self.team.pk}), equals(events.properties_group_custom[%(hogql_val_1)s], %(hogql_val_2)s)) " + "LIMIT 50000" + ) + + # TODO: Ideally we'd be able to optimize queries that compare aliases, but this is a bit tricky since we need + # the ability to resolve the field back to the aliased expression (if one exists) to determine whether or not + # the condition can be optimized (and possibly just inline the aliased value to make things easier for the + # analyzer.) Until then, this should just use the direct (simple) property group access method. + parsed = parse_select("SELECT properties.file_type AS ft, 'image/svg' as ft2 FROM events WHERE ft = ft2") + assert print_ast(parsed, build_context(PropertyGroupsMode.OPTIMIZED), dialect="clickhouse") == print_ast( + parsed, build_context(PropertyGroupsMode.ENABLED), dialect="clickhouse" + ) + def test_methods(self): self.assertEqual(self._expr("count()"), "count()") self.assertEqual(self._expr("count(distinct event)"), "count(DISTINCT events.event)") @@ -398,7 +640,7 @@ def test_expr_parse_errors(self): ) self._assert_expr_error( "quantile()(event)", - "Aggregation 'quantile' requires parameters in addition to arguments", + "Aggregation 'quantile' expects 1 parameter, found 0", ) self._assert_expr_error( "quantile(0.5, 2)(event)", @@ -444,12 +686,13 @@ def test_expr_parse_errors_poe_off(self): def test_expr_syntax_errors(self): self._assert_expr_error("(", "no viable alternative at input '('") - self._assert_expr_error("())", "no viable alternative at input '()'") + self._assert_expr_error("())", "mismatched input ')' expecting '->'") self._assert_expr_error("(3 57", "no viable alternative at input '(3 57'") self._assert_expr_error("select query from events", "mismatched input 'query' expecting ") self._assert_expr_error("this makes little sense", "mismatched input 'makes' expecting ") self._assert_expr_error("1;2", "mismatched input ';' expecting ") - self._assert_expr_error("b.a(bla)", "mismatched input '(' expecting ") + self._assert_expr_error("b.a(bla)", "You can only call simple functions in HogQL, not expressions") + self._assert_expr_error("a -> { print(2) }", "You can not use blocks in HogQL") def test_logic(self): self.assertEqual( diff --git a/posthog/hogql/test/test_timings.py b/posthog/hogql/test/test_timings.py index cfb2259157afa..6e01180e7a62a 100644 --- a/posthog/hogql/test/test_timings.py +++ b/posthog/hogql/test/test_timings.py @@ -15,9 +15,6 @@ class TestHogQLTimings(BaseTest): def setUp(self): counter_values[0] = 0 - def assertAlmostEquals(self, a, b, epsilon=EPSILON): - self.assertTrue(abs(a - b) < epsilon, f"{a} != {b} within {epsilon}") - def test_basic_timing(self): with patch("posthog.hogql.timings.perf_counter", fake_perf_counter): timings = HogQLTimings() diff --git a/posthog/hogql/visitor.py b/posthog/hogql/visitor.py index 025ecc5f52e92..2a628181ae225 100644 --- a/posthog/hogql/visitor.py +++ b/posthog/hogql/visitor.py @@ -102,6 +102,11 @@ def visit_call(self, node: ast.Call): for expr in node.params: self.visit(expr) + def visit_expr_call(self, node: ast.ExprCall): + self.visit(node.expr) + for expr in node.args: + self.visit(expr) + def visit_sample_expr(self, node: ast.SampleExpr): self.visit(node.sample_value) self.visit(node.offset_value) @@ -507,6 +512,15 @@ def visit_call(self, node: ast.Call): distinct=node.distinct, ) + def visit_expr_call(self, node: ast.ExprCall): + return ast.ExprCall( + 
start=None if self.clear_locations else node.start, + end=None if self.clear_locations else node.end, + type=None if self.clear_types else node.type, + expr=self.visit(node.expr), + args=[self.visit(arg) for arg in node.args], + ) + def visit_ratio_expr(self, node: ast.RatioExpr): return ast.RatioExpr( start=None if self.clear_locations else node.start, diff --git a/posthog/hogql_queries/insights/funnels/funnel_query_context.py b/posthog/hogql_queries/insights/funnels/funnel_query_context.py index 8c280e272dbe3..ef6cf57af9563 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_query_context.py +++ b/posthog/hogql_queries/insights/funnels/funnel_query_context.py @@ -2,7 +2,6 @@ from posthog.hogql.constants import LimitContext from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.insights.query_context import QueryContext -from posthog.models.filters.mixins.utils import cached_property from posthog.models.property.util import box_value from posthog.models.team.team import Team from posthog.schema import ( @@ -39,6 +38,8 @@ class FunnelQueryContext(QueryContext): includeProperties: list[str] includeFinalMatchingEvents: Optional[bool] + max_steps_override: int | None = None + def __init__( self, query: FunnelsQuery, @@ -105,6 +106,8 @@ def __init__( self.actorsQuery = None - @cached_property + @property def max_steps(self) -> int: + if self.max_steps_override is not None: + return self.max_steps_override return len(self.query.series) diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends.py b/posthog/hogql_queries/insights/funnels/funnel_trends.py index ad7a67d1207d6..3c69fe7229272 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/funnel_trends.py @@ -120,16 +120,15 @@ def _format_single_summary(self, summary): labels.append(timestamp.strftime(HUMAN_READABLE_TIMESTAMP_FORMAT)) return {"count": count, "data": data, "days": days, "labels": labels} - def get_query(self) -> ast.SelectQuery: - team, interval, query, now = self.context.team, self.context.interval, self.context.query, self.context.now - - date_range = QueryDateRange( - date_range=query.dateRange, - team=team, - interval=query.interval, - now=now, + def _date_range(self): + return QueryDateRange( + date_range=self.context.query.dateRange, + team=self.context.team, + interval=self.context.query.interval, + now=self.context.now, ) + def get_query(self) -> ast.SelectQuery: step_counts = self.get_step_counts_without_aggregation_query() # Expects multiple rows for same person, first event time, steps taken. 
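The max_steps_override plus @property change above follows a simple late-override pattern: the UDF trends runner added further down assigns max_steps_override on an already-constructed context, so the value has to be recomputed on each access rather than cached. A minimal sketch of the behaviour, using a simplified stand-in for the context:

class FunnelContextSketch:
    """Simplified stand-in for FunnelQueryContext, showing only the max_steps handling."""

    def __init__(self, series: list):
        self.series = series
        self.max_steps_override: int | None = None

    @property
    def max_steps(self) -> int:
        if self.max_steps_override is not None:
            return self.max_steps_override
        return len(self.series)

context = FunnelContextSketch(series=["step 0", "step 1", "step 2"])
assert context.max_steps == 3
context.max_steps_override = 2  # e.g. truncating the funnel at funnelToStep + 1, as FunnelTrendsUDF does
assert context.max_steps == 2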
@@ -138,12 +137,6 @@ def get_query(self) -> ast.SelectQuery: reached_to_step_count_condition, _, ) = self.get_steps_reached_conditions() - interval_func = get_interval_func_ch(interval.value) - - if date_range.date_from() is None: - _date_from = get_earliest_timestamp(team.pk) - else: - _date_from = date_range.date_from() breakdown_clause = self._get_breakdown_prop_expr() @@ -154,52 +147,12 @@ def get_query(self) -> ast.SelectQuery: *breakdown_clause, ] - formatted_date_from = (_date_from.strftime("%Y-%m-%d %H:%M:%S"),) - formatted_date_to = (date_range.date_to().strftime("%Y-%m-%d %H:%M:%S"),) - date_from_as_hogql = ast.Call( - name="assumeNotNull", - args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_from))])], - ) - date_to_as_hogql = ast.Call( - name="assumeNotNull", - args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_to))])], - ) data_select_from = ast.JoinExpr(table=step_counts) data_group_by: list[ast.Expr] = [ast.Field(chain=["entrance_period_start"]), *breakdown_clause] data_query = ast.SelectQuery(select=data_select, select_from=data_select_from, group_by=data_group_by) - fill_select: list[ast.Expr] = [ - ast.Alias( - alias="entrance_period_start", - expr=ast.ArithmeticOperation( - left=get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), - right=ast.Call(name=interval_func, args=[ast.Field(chain=["number"])]), - op=ast.ArithmeticOperationOp.Add, - ), - ), - ] - fill_select_from = ast.JoinExpr( - table=ast.Field(chain=["numbers"]), - table_args=[ - ast.ArithmeticOperation( - left=ast.Call( - name="dateDiff", - args=[ - ast.Constant(value=interval.value), - get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), - get_start_of_interval_hogql(interval.value, team=team, source=date_to_as_hogql), - ], - ), - right=ast.Constant(value=1), - op=ast.ArithmeticOperationOp.Add, - ) - ], - alias="period_offsets", - ) - fill_query = ast.SelectQuery( - select=fill_select, - select_from=fill_select_from, - ) + fill_query = self._get_fill_query() + fill_join = ast.JoinExpr( table=data_query, alias="data", @@ -254,7 +207,7 @@ def get_query(self) -> ast.SelectQuery: ) breakdown_limit = self.get_breakdown_limit() if breakdown_limit: - limit = min(breakdown_limit * len(date_range.all_values()), limit) + limit = min(breakdown_limit * len(self._date_range().all_values()), limit) else: select = [ ast.Field(chain=["fill", "entrance_period_start"]), @@ -276,6 +229,63 @@ def get_query(self) -> ast.SelectQuery: limit=ast.Constant(value=limit), # increased limit (default 100) for hourly breakdown ) + # The fill query returns all the start_interval dates in the response + def _get_fill_query(self) -> str: + team, interval = self.context.team, self.context.interval + + date_range = self._date_range() + + if date_range.date_from() is None: + _date_from = get_earliest_timestamp(team.pk) + else: + _date_from = date_range.date_from() + + formatted_date_from = (_date_from.strftime("%Y-%m-%d %H:%M:%S"),) + formatted_date_to = (date_range.date_to().strftime("%Y-%m-%d %H:%M:%S"),) + date_from_as_hogql = ast.Call( + name="assumeNotNull", + args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_from))])], + ) + date_to_as_hogql = ast.Call( + name="assumeNotNull", + args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=formatted_date_to))])], + ) + interval_func = get_interval_func_ch(interval.value) + + fill_select: list[ast.Expr] = [ + ast.Alias( + alias="entrance_period_start", + 
expr=ast.ArithmeticOperation( + left=get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), + right=ast.Call(name=interval_func, args=[ast.Field(chain=["number"])]), + op=ast.ArithmeticOperationOp.Add, + ), + ), + ] + fill_select_from = ast.JoinExpr( + table=ast.Field(chain=["numbers"]), + table_args=[ + ast.ArithmeticOperation( + left=ast.Call( + name="dateDiff", + args=[ + ast.Constant(value=interval.value), + get_start_of_interval_hogql(interval.value, team=team, source=date_from_as_hogql), + get_start_of_interval_hogql(interval.value, team=team, source=date_to_as_hogql), + ], + ), + right=ast.Constant(value=1), + op=ast.ArithmeticOperationOp.Add, + ) + ], + alias="period_offsets", + ) + fill_query = ast.SelectQuery( + select=fill_select, + select_from=fill_select_from, + ) + return fill_query + def get_step_counts_without_aggregation_query( self, *, specific_entrance_period_start: Optional[datetime] = None ) -> ast.SelectQuery: diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py new file mode 100644 index 0000000000000..30b7fb48c2488 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py @@ -0,0 +1,160 @@ +from typing import cast + +from posthog.hogql import ast +from posthog.hogql.constants import HogQLQuerySettings +from posthog.hogql.parser import parse_select +from posthog.hogql_queries.insights.funnels import FunnelTrends +from posthog.hogql_queries.insights.utils.utils import get_start_of_interval_hogql_str +from posthog.schema import BreakdownType, BreakdownAttributionType +from posthog.utils import DATERANGE_MAP + +TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S" +HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y" + + +class FunnelTrendsUDF(FunnelTrends): + def get_step_counts_query(self): + max_steps = self.context.max_steps + return self._get_step_counts_query( + outer_select=[ + *self._get_matching_event_arrays(max_steps), + ], + inner_select=[ + *self._get_matching_events(max_steps), + ], + ) + + def conversion_window_limit(self) -> int: + return int( + self.context.funnelWindowInterval * DATERANGE_MAP[self.context.funnelWindowIntervalUnit].total_seconds() + ) + + def get_query(self) -> ast.SelectQuery: + # If they're asking for a "to_step" just truncate the funnel + funnelsFilter = self.context.funnelsFilter + max_steps = self.context.max_steps if funnelsFilter.funnelToStep is None else funnelsFilter.funnelToStep + 1 + self.context.max_steps_override = max_steps + + if self.context.funnelsFilter.funnelOrderType == "strict": + inner_event_query = self._get_inner_event_query_for_udf( + entity_name="events", skip_step_filter=True, skip_entity_filter=True + ) + else: + inner_event_query = self._get_inner_event_query_for_udf(entity_name="events") + + default_breakdown_selector = "[]" if self._query_has_array_breakdown() else "''" + + # stores the steps as an array of integers from 1 to max_steps + # so if the event could be step_0, step_1 or step_4, it looks like [1,2,0,0,5] + + # Each event is going to be a set of steps or it's going to be a set of exclusions. It can't be both. 
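To make the step encoding described above concrete: the inner query emits a 0/1 flag per step for every event, each flag is multiplied by its one-based step number, and the zeros are stripped by the arrayFilter before the tuples reach the aggregate_funnel_* UDF. A small pure-Python sketch of that encoding:

def encode_steps(step_flags: list[int]) -> list[int]:
    encoded = [(i + 1) * flag for i, flag in enumerate(step_flags)]  # e.g. [1, 2, 0, 0, 5]
    return [value for value in encoded if value != 0]                # mirrors arrayFilter(x -> x != 0, ...)

# An event that could be step_0, step_1 or step_4 in a five-step funnel:
assert encode_steps([1, 1, 0, 0, 1]) == [1, 2, 5]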
+ steps = ",".join([f"{i + 1} * step_{i}" for i in range(self.context.max_steps)]) + + # this will error if they put in a bad exclusion + exclusions = "" + if getattr(self.context.funnelsFilter, "exclusions", None): + exclusions = "".join([f",-{i + 1} * exclusion_{i}" for i in range(1, self.context.max_steps)]) + + # Todo: Make this work for breakdowns + if self.context.breakdownType == BreakdownType.COHORT: + fn = "aggregate_funnel_cohort_trends" + breakdown_prop = ", prop" + elif self._query_has_array_breakdown(): + fn = "aggregate_funnel_array_trends" + breakdown_prop = "" + else: + fn = "aggregate_funnel_trends" + breakdown_prop = "" + + prop_selector = "prop" if self.context.breakdown else default_breakdown_selector + prop_vals = "groupUniqArray(prop)" if self.context.breakdown else f"[{default_breakdown_selector}]" + + breakdown_attribution_string = f"{self.context.breakdownAttributionType}{f'_{self.context.funnelsFilter.breakdownAttributionValue}' if self.context.breakdownAttributionType == BreakdownAttributionType.STEP else ''}" + + from_step = funnelsFilter.funnelFromStep or 0 + + inner_select = parse_select( + f""" + SELECT + arrayJoin({fn}( + {from_step}, + {max_steps}, + {self.conversion_window_limit()}, + '{breakdown_attribution_string}', + '{self.context.funnelsFilter.funnelOrderType}', + {prop_vals}, + arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), {get_start_of_interval_hogql_str(self.context.interval.value, team=self.context.team, source='timestamp')}, {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) + )) as af_tuple, + toTimeZone(af_tuple.1, '{self.context.team.timezone}') as entrance_period_start, + af_tuple.2 as success_bool, + af_tuple.3 as breakdown + FROM {{inner_event_query}} + GROUP BY aggregation_target{breakdown_prop} + """, + {"inner_event_query": inner_event_query}, + ) + # This is necessary so clickhouse doesn't truncate timezone information when passing datetimes to python + inner_select.settings = HogQLQuerySettings(date_time_output_format="iso", date_time_input_format="best_effort") + + conversion_rate_expr = ( + "if(reached_from_step_count > 0, round(reached_to_step_count / reached_from_step_count * 100, 2), 0)" + ) + + fill_query = self._get_fill_query() + + limit = 1_000 + if self.context.breakdown: + breakdown_limit = self.get_breakdown_limit() + if breakdown_limit: + limit = min(breakdown_limit * len(self._date_range().all_values()), limit) + + s = parse_select( + f""" + SELECT + fill.entrance_period_start AS entrance_period_start, + sumIf(data.reached_from_step_count, ifNull(equals(fill.entrance_period_start, data.entrance_period_start), isNull(fill.entrance_period_start) and isNull(data.entrance_period_start))) AS reached_from_step_count, + sumIf(data.reached_to_step_count, ifNull(equals(fill.entrance_period_start, data.entrance_period_start), isNull(fill.entrance_period_start) and isNull(data.entrance_period_start))) AS reached_to_step_count, + if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, + data.prop AS prop + FROM + ({{fill_query}}) as fill + CROSS JOIN (SELECT + entrance_period_start as entrance_period_start, + countIf(success_bool != 0) as reached_from_step_count, + countIf(success_bool = 1) as reached_to_step_count, + breakdown as prop + FROM + ({{inner_select}}) + GROUP BY entrance_period_start, breakdown) as data + GROUP BY + fill.entrance_period_start, + data.prop + ORDER BY + 
sum(reached_from_step_count) OVER (PARTITION BY data.prop) DESC, + data.prop DESC, + fill.entrance_period_start ASC + LIMIT {limit} + """, + {"fill_query": fill_query, "inner_select": inner_select}, + ) + else: + s = parse_select( + f""" + SELECT + fill.entrance_period_start as entrance_period_start, + countIf(success_bool != 0) as reached_from_step_count, + countIf(success_bool = 1) as reached_to_step_count, + {conversion_rate_expr} as conversion_rate, + breakdown as prop + FROM + ({{inner_select}}) as data + RIGHT OUTER JOIN + ({{fill_query}}) as fill + ON data.entrance_period_start = fill.entrance_period_start + GROUP BY entrance_period_start, data.breakdown + ORDER BY entrance_period_start + LIMIT {limit} + """, + {"fill_query": fill_query, "inner_select": inner_select}, + ) + return cast(ast.SelectQuery, s) diff --git a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py index ea439d4c89493..0c01786f2f073 100644 --- a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py @@ -17,6 +17,7 @@ from posthog.hogql_queries.insights.funnels.funnel_query_context import FunnelQueryContext from posthog.hogql_queries.insights.funnels.funnel_time_to_convert import FunnelTimeToConvert from posthog.hogql_queries.insights.funnels.funnel_trends import FunnelTrends +from posthog.hogql_queries.insights.funnels.funnel_trends_udf import FunnelTrendsUDF from posthog.hogql_queries.insights.funnels.utils import get_funnel_actor_class, get_funnel_order_class from posthog.hogql_queries.legacy_compatibility.feature_flag import insight_funnels_use_udf from posthog.hogql_queries.query_runner import QueryRunner @@ -29,6 +30,7 @@ FunnelsQuery, FunnelsQueryResponse, HogQLQueryModifiers, + StepOrderValue, ) @@ -115,8 +117,14 @@ def funnel_order_class(self): def funnel_class(self): funnelVizType = self.context.funnelsFilter.funnelVizType + use_udf = insight_funnels_use_udf(self.team) + if funnelVizType == FunnelVizType.TRENDS: - return FunnelTrends(context=self.context, **self.kwargs) + return ( + FunnelTrendsUDF(context=self.context, **self.kwargs) + if use_udf and self.context.funnelsFilter.funnelOrderType != StepOrderValue.UNORDERED + else FunnelTrends(context=self.context, **self.kwargs) + ) elif funnelVizType == FunnelVizType.TIME_TO_CONVERT: return FunnelTimeToConvert(context=self.context) else: diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr index 1dbe7a3fd269f..9869167471e0e 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr @@ -1,1209 +1,5 @@ # serializer version: 1 -# name: TestFOSSFunnel.test_funnel_conversion_window_seconds - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - countIf(ifNull(equals(steps, 3), 0)) AS step_3, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time, - median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS 
step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'step one'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'step two'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(equals(e.event, 'step three'), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE 
and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFOSSFunnel.test_funnel_conversion_window_seconds.1 - ''' - SELECT persons.id, - persons.id AS id, - persons.created_at AS created_at, - 1 - FROM - (SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS 
BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'step one'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'step two'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(equals(e.event, 'step three'), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [2, 3]), 0) - ORDER BY aggregation_target ASC) AS source - INNER JOIN - (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, - person.id AS id - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.created_at DESC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFOSSFunnel.test_funnel_events_with_person_on_events_v2 - ''' - - SELECT DISTINCT person_id - FROM events - WHERE team_id = 2 - AND distinct_id = 'stopped_after_pay' - ''' -# --- -# name: TestFOSSFunnel.test_funnel_events_with_person_on_events_v2.1 - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - countIf(ifNull(equals(steps, 3), 0)) AS step_3, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time, - median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS 
step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, '$autocapture'), match(e.elements_chain, '(^|;)button(\\.|$|;|:)'), arrayExists(x -> ifNull(equals(x, 'Pay $10'), 0), e.elements_chain_texts)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(and(equals(e.event, '$autocapture'), match(e.elements_chain, '(^|;)a(\\.|$|;|:)'), equals(e.elements_chain_href, '/movie')), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 - FROM events AS e - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, - person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE 
equals(person_distinct_id_overrides.team_id, 2) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2011-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2012-01-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$autocapture', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFOSSFunnel.test_funnel_with_precalculated_cohort_step_filter - ''' - - SELECT count(DISTINCT person_id) - FROM cohortpeople - WHERE team_id = 2 - AND cohort_id = 2 - AND version = NULL - ''' -# --- -# name: TestFOSSFunnel.test_funnel_with_precalculated_cohort_step_filter.1 - ''' - /* cohort_calculation: */ - SELECT count(DISTINCT person_id) - FROM cohortpeople - WHERE team_id = 2 - AND cohort_id = 2 - AND version = 0 - ''' -# --- -# name: TestFOSSFunnel.test_funnel_with_precalculated_cohort_step_filter.2 - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(and(equals(e.event, 'user signed up'), ifNull(in(e__pdi.person_id, - (SELECT cohortpeople.person_id AS person_id - FROM cohortpeople - WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), 
equals(cohortpeople.version, 0)))), 0)), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFOSSFunnel.test_funnel_with_property_groups - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - countIf(ifNull(equals(steps, 3), 0)) AS step_3, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time, - median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - 
if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), 
lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFOSSFunnel.test_funnel_with_property_groups.1 - ''' - SELECT persons.id, - persons.id AS id, - persons.created_at AS created_at, - 1 - FROM - (SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS 
step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2, 3]), 0) - ORDER BY aggregation_target ASC) AS source - INNER JOIN - (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, - person.id AS id - FROM person - WHERE equals(person.team_id, 2) - GROUP BY 
person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.created_at DESC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFOSSFunnel.test_funnel_with_property_groups.2 - ''' - SELECT persons.id, - persons.id AS id, - persons.created_at AS created_at, - 1 - FROM - (SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT 
toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [2, 3]), 0) - ORDER BY aggregation_target ASC) AS source - INNER JOIN - (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, - person.id AS id - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.created_at DESC - 
LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFOSSFunnel.test_funnel_with_property_groups.3 - ''' - SELECT persons.id, - persons.id AS id, - persons.created_at AS created_at, - 1 - FROM - (SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 
'null'), '^"|"$', ''), 'aloha.com'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(and(equals(e.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', ''), 'aloha2.com'), 0)), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'age'), ''), 'null'), '^"|"$', '') AS properties___age - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [3]), 0) - ORDER BY aggregation_target ASC) AS source - INNER JOIN - (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, - person.id AS id - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.created_at DESC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=1 - ''' -# --- -# name: 
TestFOSSFunnel.test_funnel_with_static_cohort_step_filter - ''' - - SELECT count(DISTINCT person_id) - FROM person_static_cohort - WHERE team_id = 2 - AND cohort_id = 2 - ''' -# --- -# name: TestFOSSFunnel.test_funnel_with_static_cohort_step_filter.1 - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(and(equals(e.event, 'user signed up'), ifNull(in(e__pdi.person_id, - (SELECT person_static_cohort.person_id AS person_id - FROM person_static_cohort - WHERE and(equals(person_static_cohort.team_id, 2), equals(person_static_cohort.cohort_id, 2)))), 0)), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - 
max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFOSSFunnel.test_timezones - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, 
inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER BY step_2 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), 
toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER BY step_2 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop_0 AS prop_0, - prop_1 AS prop_1, - prop, - prop_vals AS prop_vals, - prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), 
'^"|"$', '')), '')] AS prop_basic, - if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, - if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, - prop_1 AS prop, - groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY - JOIN prop_vals AS prop - WHERE ifNull(notEquals(prop, []), isNotNull(prop) - or isNotNull([])))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelBreakdown.test_funnel_step_multiple_breakdown_snapshot +# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen ''' SELECT sum(step_1) AS step_1, sum(step_2) AS step_2, @@ -1262,15 +58,15 @@ prop_basic AS prop_basic, prop, prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'sign up'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, prop_basic AS prop, argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals FROM events AS e @@ -1300,308 +96,26 @@ allow_experimental_analyzer=1 ''' # --- -# name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - sum(step_3) AS step_3, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), 
NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, - if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - countIf(ifNull(equals(steps, 3), 0)) AS step_3, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - groupArray(step_2_conversion_time) AS step_2_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER BY step_3 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time, - min(step_2_conversion_time) AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS 
BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - prop_vals AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'play movie'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(equals(e.event, 'buy'), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, - ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - sum(step_3) AS step_3, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, - if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - countIf(ifNull(equals(steps, 3), 0)) AS step_3, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - groupArray(step_2_conversion_time) AS step_2_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER 
BY step_3 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time, - min(step_2_conversion_time) AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - prop_vals AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'play movie'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(equals(e.event, 'buy'), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, - ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, - 
prop_basic AS prop, - argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group +# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step ''' SELECT sum(step_1) AS step_1, sum(step_2) AS step_2, - sum(step_3) AS step_3, if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, - if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop FROM (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, countIf(ifNull(equals(steps, 2), 0)) AS step_2, - countIf(ifNull(equals(steps, 3), 0)) AS step_3, groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - groupArray(step_2_conversion_time) AS step_2_conversion_time_array, prop AS prop, row_number() OVER ( - ORDER BY step_3 DESC) AS row_number + ORDER BY step_2 DESC) AS row_number FROM (SELECT aggregation_target AS aggregation_target, steps AS steps, prop AS prop, prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time, - min(step_2_conversion_time) AS step_2_conversion_time + min(step_1_conversion_time) AS step_1_conversion_time FROM (SELECT aggregation_target AS aggregation_target, steps AS steps, @@ -1609,7 +123,6 @@ max(steps) OVER (PARTITION BY aggregation_target, prop) AS max_steps, step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time, prop AS prop FROM (SELECT aggregation_target AS aggregation_target, @@ -1618,12 +131,9 @@ latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS 
latest_2, prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, prop AS prop FROM (SELECT aggregation_target AS aggregation_target, @@ -1631,78 +141,47 @@ step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target, + min(latest_1) OVER (PARTITION BY aggregation_target, prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, prop AS prop FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, - prop AS prop + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - step_2 AS step_2, - latest_2 AS latest_2, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - prop_vals AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'play movie'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(equals(e.event, 'buy'), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, - ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS 
distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))))))) + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([])))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps, @@ -1721,575 +200,3 @@ allow_experimental_analyzer=1 ''' # --- -# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.1 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 2 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 
'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.2 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner , - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time , - prop - FROM - (SELECT *, - if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY - AND latest_1 <= latest_2 - AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , - if(isNotNull(latest_1) - AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, - if(isNotNull(latest_2) - AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - latest_1, - step_2, - if(latest_2 < latest_1, NULL, latest_2) as latest_2 , - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - pdi.person_id as aggregation_target, - pdi.person_id as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - INNER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 2 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - 
argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 2 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max_steps) - WHERE steps IN [1, 2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- -# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.3 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 2 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.4 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner , - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time , - prop - FROM - (SELECT *, - if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY - AND latest_1 <= latest_2 - AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , - if(isNotNull(latest_1) - AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, - if(isNotNull(latest_2) - AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - latest_1, - step_2, - if(latest_2 < latest_1, NULL, latest_2) as latest_2 , - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - 
if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - pdi.person_id as aggregation_target, - pdi.person_id as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - INNER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 2 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 2 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max_steps) - WHERE steps IN [2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- -# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.5 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 2 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.6 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner , - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time , - prop - FROM - (SELECT *, - if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY - AND latest_1 <= latest_2 - AND latest_2 <= 
latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , - if(isNotNull(latest_1) - AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, - if(isNotNull(latest_2) - AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - latest_1, - step_2, - if(latest_2 < latest_1, NULL, latest_2) as latest_2 , - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - pdi.person_id as aggregation_target, - pdi.person_id as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - INNER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 2 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 2 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max_steps) - WHERE steps IN [1, 2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- -# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.7 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT 
group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 2 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.8 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner , - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time , - prop - FROM - (SELECT *, - if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY - AND latest_1 <= latest_2 - AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , - if(isNotNull(latest_1) - AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, - if(isNotNull(latest_2) - AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - latest_1, - step_2, - if(latest_2 < latest_1, NULL, latest_2) as latest_2 , - prop - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - pdi.person_id as aggregation_target, - pdi.person_id as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - INNER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 2 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 
00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 2 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 2 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max_steps) - WHERE steps IN [2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr deleted file mode 100644 index b507403f6e484..0000000000000 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr +++ /dev/null @@ -1,195 +0,0 @@ -# serializer version: 1 -# name: TestFunnelBreakdownsByCurrentURL.test_breakdown_by_current_url - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(ifNull(less(row_number, 101), 0), prop, ['Other']) AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER BY step_2 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target 
AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'watched movie'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'terminate funnel'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [if(empty(replaceRegexpOne(ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '')), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$current_url'), ''), 'null'), '^"|"$', '')), ''), '[\\/?#]*$', ''))] AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('terminate funnel', 'watched movie'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelBreakdownsByCurrentURL.test_breakdown_by_pathname - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(ifNull(less(row_number, 101), 0), prop, ['Other']) AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER BY step_2 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - 
step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'watched movie'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'terminate funnel'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [if(empty(replaceRegexpOne(ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', '')), ''), '[\\/?#]*$', '')), '/', replaceRegexpOne(ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$pathname'), ''), 'null'), '^"|"$', '')), ''), '[\\/?#]*$', ''))] AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-02 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-12 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('terminate funnel', 'watched movie'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- diff --git 
a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr deleted file mode 100644 index 04a7ddc45281f..0000000000000 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr +++ /dev/null @@ -1,8140 +0,0 @@ -# serializer version: 1 -# name: TestClickhouseFunnelCorrelation.test_action_events_are_excluded_from_correlations - ''' - SELECT event.event AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM events AS event - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS event__pdi ON equals(event.distinct_id, event__pdi.distinct_id) - JOIN - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(and(equals(e.event, 'user signed up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0)), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, 'paid'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - 
person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) - GROUP BY name - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, 
- timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(and(equals(e.event, 'user signed up'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0)), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, 'paid'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(arrayZip(['$browser'], [JSONExtractString(persons.person_props, '$browser')])) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - 
step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - JOIN - (SELECT persons.id AS id, - persons.properties AS person_props - FROM - (SELECT person.id AS id, - person.properties AS properties - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons) AS persons ON equals(persons.id, funnel_actors.actor_id)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT 
aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.1 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS 
matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - 
if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS 
step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'paid'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY 
person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.2 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2019-12-31 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, [''])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.3 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS 
first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - 
if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS 
step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'paid'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, 
e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.4 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2019-12-31 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, [''])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.5 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS 
final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - 
if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, 
max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'paid'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser` - FROM person - WHERE 
and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.6 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2019-12-31 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, [''])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.7 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, 
- avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, 
person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS 
aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'paid'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING 
and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties.8 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2019-12-31 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, [''])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties_materialized - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(arrayZip(['$browser'], [JSONExtractString(persons.person_props, '$browser')])) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - 
avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - JOIN - (SELECT persons.id AS id, - persons.properties AS person_props - FROM - (SELECT person.id AS id, - person.properties AS properties - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons) AS persons ON equals(persons.id, funnel_actors.actor_id)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - 
UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# 
name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties_materialized.1 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT 
toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - nullIf(nullIf(person.`pmat_$browser`, ''), 'null') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, 
argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'paid'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, 
argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.`pmat_$browser`, ''), 'null') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties_materialized.2 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2019-12-31 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, [''])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties_materialized.3 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - 
FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - 
if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - nullIf(nullIf(person.`pmat_$browser`, ''), 'null') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, 
uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'paid'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING 
ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.`pmat_$browser`, ''), 'null') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties_materialized.4 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2019-12-31 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, [''])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties_materialized.5 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT 
groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, 
NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - nullIf(nullIf(person.`pmat_$browser`, ''), 'null') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, 
step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'paid'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.`pmat_$browser`, ''), 'null') AS 
`properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties_materialized.6 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2019-12-31 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, [''])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties_materialized.7 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, 
steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, 
person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - nullIf(nullIf(person.`pmat_$browser`, ''), 'null') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT 
aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'paid'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.`pmat_$browser`, ''), 'null') AS `properties___$browser` - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 
0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_basic_funnel_correlation_with_properties_materialized.8 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2019-12-31 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, [''])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_event_properties_and_groups - ''' - SELECT concat(ifNull(toString(event_name), ''), '::', ifNull(toString((prop).1), ''), '::', ifNull(toString((prop).2), '')) AS name, - countDistinctIf(actor_id, ifNull(equals(steps, 2), 0)) AS success_count, - countDistinctIf(actor_id, ifNull(notEquals(steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - event.event AS event_name, - arrayJoin(JSONExtractKeysAndValues(event.properties, 'String')) AS prop - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS 
aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_1` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_1`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), in(event.event, ['positively_related', 'negatively_related']))) - GROUP BY name - HAVING and(ifNull(greater(plus(success_count, failure_count), 2), 0), ifNull(notIn((prop).1, []), 0)) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT 
aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_1` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_event_properties_and_groups_materialized - ''' - SELECT concat(ifNull(toString(event_name), ''), '::', ifNull(toString((prop).1), ''), '::', ifNull(toString((prop).2), '')) AS name, - countDistinctIf(actor_id, ifNull(equals(steps, 2), 0)) AS success_count, - countDistinctIf(actor_id, ifNull(notEquals(steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - event.event AS event_name, - arrayJoin(JSONExtractKeysAndValues(event.properties, 'String')) AS prop - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, 
steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_1` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_1`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), in(event.event, ['positively_related', 'negatively_related']))) - GROUP BY name - HAVING and(ifNull(greater(plus(success_count, failure_count), 2), 0), ifNull(notIn((prop).1, []), 0)) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, 
ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_1` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups - ''' - SELECT event.event AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS 
final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) - GROUP BY name - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - 
final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.1 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS 
final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 
'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.2 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY 
aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), 
greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.3 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 
'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, 
plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.4 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS 
aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 
SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.5 - ''' - SELECT event.event AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'finance'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), 
isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) - GROUP BY name - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS 
key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'finance'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.6 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, 
uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT 
groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups.7 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER 
(PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2 - ''' - SELECT event.event AS name, - 
countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), 
toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) - GROUP BY name - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.1 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS 
event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - 
if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.2 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS 
first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 
00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.3 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, 
- step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), 
greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.4 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 
'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, 
plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.5 - ''' - SELECT event.event AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY 
groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'finance'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) - GROUP BY name - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED 
PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'finance'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.6 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - 
`$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 
23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_events_and_groups_poe_v2.7 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS 
step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source 
- INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(arrayZip(['industry'], [JSONExtractString(groups_0.properties, 'industry')])) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE 
ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS groups_0 ON equals(funnel_actors.actor_id, groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY 
aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.1 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS 
BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.2 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) 
AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 
0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.3 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT 
aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE 
ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.4 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT 
aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups.5 - ''' - SELECT 
concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(JSONExtractKeysAndValues(groups_0.properties, 'String')) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS 
groups_0 ON equals(funnel_actors.actor_id, groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', 
ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(arrayZip(['industry'], [JSONExtractString(groups_0.properties, 'industry')])) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS groups_0 ON equals(funnel_actors.actor_id, 
groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.1 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS 
matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - 
if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.2 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), 
if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), 
and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.3 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 
0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, 
source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.4 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY 
timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_materialized.5 - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(JSONExtractKeysAndValues(groups_0.properties, 'String')) AS prop - FROM - 
(SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS groups_0 ON equals(funnel_actors.actor_id, groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS 
timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(arrayZip(['industry'], [JSONExtractString(groups_0.properties, 'industry')])) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, 
- steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS groups_0 ON equals(funnel_actors.actor_id, groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - 
first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.1 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS 
first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), 
'^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.2 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - 
uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors 
- WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.3 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - 
min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.4 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS 
matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - 
if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events.5 - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(JSONExtractKeysAndValues(groups_0.properties, 'String')) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY 
aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS groups_0 ON equals(funnel_actors.actor_id, groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - 
step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(arrayZip(['industry'], [JSONExtractString(groups_0.properties, 'industry')])) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, 
- step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS groups_0 ON equals(funnel_actors.actor_id, groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - 
latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.1 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER 
(PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 
'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.2 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, 
`$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - 
max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.3 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp 
DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.4 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS 
first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), 
'^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_materialized.5 - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(JSONExtractKeysAndValues(groups_0.properties, 'String')) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), 
ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS groups_0 ON equals(funnel_actors.actor_id, groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 
'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2 - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(arrayZip(['industry'], [JSONExtractString(groups_0.properties, 'industry')])) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 
'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS groups_0 ON equals(funnel_actors.actor_id, groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', 
latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.1 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - 
if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY 
funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.2 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED 
PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.3 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - 
first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, 
- if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.4 - ''' - SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY 
aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) - GROUP BY groups.group_type_index, - groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, 
tuple('paid', 'user signed up')), ifNull(equals(e__group_0.properties___industry, 'negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY source.actor_id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestClickhouseFunnelCorrelation.test_funnel_correlation_with_properties_and_groups_person_on_events_poe_v2.5 - ''' - SELECT concat(ifNull(toString((aggregation_target_with_props.prop).1), ''), '::', ifNull(toString((aggregation_target_with_props.prop).2), '')) AS name, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(equals(aggregation_target_with_props.steps, 2), 0)) AS success_count, - countDistinctIf(aggregation_target_with_props.actor_id, ifNull(notEquals(aggregation_target_with_props.steps, 2), 1)) AS failure_count - FROM - (SELECT funnel_actors.actor_id AS actor_id, - funnel_actors.steps AS steps, - arrayJoin(JSONExtractKeysAndValues(groups_0.properties, 'String')) AS prop - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS 
step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LEFT JOIN - (SELECT groups.key AS key, - groups.properties AS properties - FROM - (SELECT argMax(groups.group_properties, toTimeZone(groups._timestamp, 'UTC')) AS properties, - groups.group_type_index AS index, - groups.group_key AS key - FROM groups - WHERE equals(groups.team_id, 2) - GROUP BY groups.group_type_index, - groups.group_key) AS groups - WHERE ifNull(equals(groups.index, 0), 0)) AS groups_0 ON equals(funnel_actors.actor_id, groups_0.key)) AS aggregation_target_with_props - GROUP BY (aggregation_target_with_props.prop).1, (aggregation_target_with_props.prop).2 - HAVING ifNull(notIn((aggregation_target_with_props.prop).1, []), 0) - LIMIT 100 - UNION ALL - SELECT 'Total_Values_In_Query' AS name, - countDistinctIf(funnel_actors.actor_id, ifNull(equals(funnel_actors.steps, 2), 0)) AS success_count, - countDistinctIf(funnel_actors.actor_id, ifNull(notEquals(funnel_actors.steps, 2), 1)) AS failure_count - FROM - (SELECT aggregation_target AS actor_id, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e.`$group_0` AS aggregation_target, - if(equals(e.event, 'user signed up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, 
NULL) AS latest_0, - if(equals(e.event, 'paid'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr deleted file mode 100644 index 687a7c05a8429..0000000000000 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr +++ /dev/null @@ -1,1029 +0,0 @@ -# serializer version: 1 -# name: TestFunnelCorrelationsActors.test_funnel_correlation_on_event_with_recordings - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS event__pdi ON equals(event.distinct_id, event__pdi.distinct_id) - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - 
latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, '$pageview'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'insight analyzed'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE 
ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed']), equals(event.event, 'insight loaded'), ifNull(equals(funnel_actors.steps, 2), 0)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM events AS event - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS event__pdi ON equals(event.distinct_id, event__pdi.distinct_id) - JOIN - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS 
step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'insight analyzed'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) - WHERE 
and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed']), equals(event.event, 'insight loaded'), ifNull(equals(funnel_actors.steps, 2), 0)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActors.test_funnel_correlation_on_event_with_recordings.1 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s2'])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActors.test_funnel_correlation_on_event_with_recordings.2 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM events AS event - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS event__pdi ON equals(event.distinct_id, event__pdi.distinct_id) - JOIN - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(step_2_matching_event) AS step_2_matching_events, - groupArray(10)(final_matching_event) AS 
final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_2, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time, - latest_0 AS latest_0, - latest_2 AS latest_2, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - latest_2 AS latest_2, - uuid_2 AS uuid_2, - `$session_id_2` AS `$session_id_2`, - `$window_id_2` AS `$window_id_2`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS 
latest_2, - last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, - last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, - last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, - if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, - last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, - last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, '$pageview'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, - if(equals(e.event, 'insight updated'), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, 
NULL) AS latest_2, - if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, - if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, - if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'insight analyzed', 'insight updated'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2, 3]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed', 'insight updated']), equals(event.event, 'insight loaded'), ifNull(notEquals(funnel_actors.steps, 3), 1)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM events AS event - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS event__pdi ON equals(event.distinct_id, event__pdi.distinct_id) - JOIN - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, 
groupArray(10)(step_2_matching_event) AS step_2_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_2, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time, latest_0 AS latest_0, latest_2 AS latest_2, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, step_2 AS step_2, latest_2 AS latest_2, uuid_2 AS uuid_2, `$session_id_2` AS `$session_id_2`, `$window_id_2` AS `$window_id_2`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS 
latest_2, last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, step_2 AS step_2, if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, if(equals(e.event, 'insight updated'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, if(ifNull(equals(step_2, 
1), 0), e.`$session_id`, NULL) AS `$session_id_2`, if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'insight analyzed', 'insight updated'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2, 3]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) - WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed', 'insight updated']), equals(event.event, 'insight loaded'), ifNull(notEquals(funnel_actors.steps, 3), 1)) - GROUP BY actor_id - ORDER BY actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActors.test_funnel_correlation_on_event_with_recordings.3 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s2'])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - 
max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActors.test_funnel_correlation_on_properties_with_recordings - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER 
BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, '$pageview'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'foo'), ''), 'null'), '^"|"$', '') AS properties___foo - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'insight analyzed')), ifNull(equals(e__pdi__person.properties___foo, 'bar'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS 
step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS 
`$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'foo'), ''), 'null'), '^"|"$', '') AS properties___foo - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'insight analyzed')), ifNull(equals(e__pdi__person.properties___foo, 'bar'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActors.test_funnel_correlation_on_properties_with_recordings.1 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s2'])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActors.test_strict_funnel_correlation_with_recordings - ''' - SELECT persons.id, - persons.id AS id, - 
source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, - min(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS uuid_1, - min(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$session_id_1`, - min(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, '$pageview'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, 
NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'foo'), ''), 'null'), '^"|"$', '') AS properties___foo - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), ifNull(equals(e__pdi__person.properties___foo, 'bar'), 0)))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT 
tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, min(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS uuid_1, min(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$session_id_1`, min(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, 
person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'foo'), ''), 'null'), '^"|"$', '') AS properties___foo - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), ifNull(equals(e__pdi__person.properties___foo, 'bar'), 0)))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActors.test_strict_funnel_correlation_with_recordings.1 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s2'])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActors.test_strict_funnel_correlation_with_recordings.2 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - final_matching_events AS matching_events, - timestamp AS timestamp, - steps AS steps, - final_timestamp AS final_timestamp, - first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - 
aggregation_target AS aggregation_target, - steps AS steps, - argMax(latest_0, steps) AS timestamp, - argMax(latest_1, steps) AS final_timestamp, - argMax(latest_0, steps) AS first_timestamp, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - latest_0 AS latest_0, - latest_1 AS latest_1, - latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, - min(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS uuid_1, - min(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$session_id_1`, - min(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, '$pageview'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS 
e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'foo'), ''), 'null'), '^"|"$', '') AS properties___foo - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), ifNull(equals(e__pdi__person.properties___foo, 'bar'), 0)))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, final_matching_events AS matching_events, timestamp AS timestamp, steps AS steps, final_timestamp AS final_timestamp, first_timestamp AS first_timestamp - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, groupArray(10)(step_1_matching_event) AS step_1_matching_events, groupArray(10)(final_matching_event) AS final_matching_events, aggregation_target AS aggregation_target, steps AS steps, argMax(latest_0, steps) AS timestamp, argMax(latest_1, steps) AS final_timestamp, argMax(latest_0, steps) AS first_timestamp, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event, aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, latest_0 AS latest_0, latest_1 AS latest_1, latest_0 AS latest_0 - FROM - (SELECT aggregation_target AS 
aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, latest_1 AS latest_1, uuid_1 AS uuid_1, `$session_id_1` AS `$session_id_1`, `$window_id_1` AS `$window_id_1`, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, step_1_matching_event)) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, uuid_0 AS uuid_0, `$session_id_0` AS `$session_id_0`, `$window_id_0` AS `$window_id_0`, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, min(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS uuid_1, min(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$session_id_1`, min(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$window_id_1` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, e.uuid AS uuid, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'foo'), ''), 'null'), '^"|"$', '') AS properties___foo - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), 
ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), ifNull(equals(e__pdi__person.properties___foo, 'bar'), 0)))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2]), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActors.test_strict_funnel_correlation_with_recordings.3 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s3'])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr deleted file mode 100644 index 77fe0acf7118f..0000000000000 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr +++ /dev/null @@ -1,562 +0,0 @@ -# serializer version: 1 -# name: TestFunnelPersons.test_funnel_person_recordings - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - step_0_matching_events AS matching_events - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(step_2_matching_event) AS step_2_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, 
`$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - latest_2 AS latest_2, - uuid_2 AS uuid_2, - `$session_id_2` AS `$session_id_2`, - `$window_id_2` AS `$window_id_2`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, - last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, - last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - 
uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, - if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, - last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, - last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'step one'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'step two'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, - if(equals(e.event, 'step three'), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, - if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, - if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, - if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS 
optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [1, 2, 3]), 0) - ORDER BY aggregation_target ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelPersons.test_funnel_person_recordings.1 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1'])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelPersons.test_funnel_person_recordings.2 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - step_1_matching_events AS matching_events - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(step_2_matching_event) AS step_2_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER 
(PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - latest_2 AS latest_2, - uuid_2 AS uuid_2, - `$session_id_2` AS `$session_id_2`, - `$window_id_2` AS `$window_id_2`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, - last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, - last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, - if(ifNull(less(latest_2, 
latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, - last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, - last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'step one'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'step two'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, - if(equals(e.event, 'step three'), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, - if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, - if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, - if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE 
ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(in(steps, [2, 3]), 0) - ORDER BY aggregation_target ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelPersons.test_funnel_person_recordings.3 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s2'])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelPersons.test_funnel_person_recordings.4 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - step_1_matching_events AS matching_events - FROM - (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, - groupArray(10)(step_1_matching_event) AS step_1_matching_events, - groupArray(10)(step_2_matching_event) AS step_2_matching_events, - groupArray(10)(final_matching_event) AS final_matching_events, - aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner, - median(step_2_conversion_time) AS step_2_median_conversion_time_inner - FROM - (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event, - aggregation_target AS aggregation_target, - steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - step_2_conversion_time AS step_2_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS 
`$window_id_1`, - step_2 AS step_2, - latest_2 AS latest_2, - uuid_2 AS uuid_2, - `$session_id_2` AS `$session_id_2`, - `$window_id_2` AS `$window_id_2`, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, - if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, - tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, - tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, - tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, - if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, - last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, - last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - latest_1 AS latest_1, - uuid_1 AS uuid_1, - `$session_id_1` AS `$session_id_1`, - `$window_id_1` AS `$window_id_1`, - step_2 AS step_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, - if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, - if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - uuid_0 AS uuid_0, - `$session_id_0` AS `$session_id_0`, - `$window_id_0` AS `$window_id_0`, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, - last_value(uuid_1) OVER (PARTITION BY aggregation_target - 
ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, - last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, - last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, - step_2 AS step_2, - min(latest_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, - last_value(uuid_2) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, - last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, - last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - e.uuid AS uuid, - if(equals(e.event, 'step one'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, - if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, - if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, - if(equals(e.event, 'step two'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, - if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, - if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, - if(equals(e.event, 'step three'), 1, 0) AS step_2, - if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, - if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, - if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, - if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - WHERE ifNull(equals(steps, 2), 0) - ORDER BY aggregation_target ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), 
plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelPersons.test_funnel_person_recordings.5 - ''' - SELECT DISTINCT session_replay_events.session_id AS session_id - FROM session_replay_events - WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s2'])) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr index e531f0999b2dc..ad55625544682 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr @@ -1,204 +1,5 @@ # serializer version: 1 -# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER BY step_2 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 
PRECEDING) AS latest_1, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER BY step_2 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT 
aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop_0 AS prop_0, - prop_1 AS prop_1, - prop, - prop_vals AS prop_vals, - prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, - if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, - if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, - prop_1 AS prop, - groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY - JOIN prop_vals AS prop - WHERE ifNull(notEquals(prop, []), isNotNull(prop) - or isNotNull([])))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot +# name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot ''' SELECT sum(step_1) AS step_1, sum(step_2) AS step_2, @@ -294,205 +95,6 @@ allow_experimental_analyzer=1 
''' # --- -# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER BY step_2 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop, - prop_vals AS prop_vals, - if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, - [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, - prop_basic AS prop, - argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON 
equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step - ''' - SELECT sum(step_1) AS step_1, - sum(step_2) AS step_2, - if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, - if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, - if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop - FROM - (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - groupArray(step_1_conversion_time) AS step_1_conversion_time_array, - prop AS prop, - row_number() OVER ( - ORDER BY step_2 DESC) AS row_number - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - prop AS prop, - min(step_1_conversion_time) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - prop AS prop, - max(steps) OVER (PARTITION BY aggregation_target, - prop) AS max_steps, - step_1_conversion_time AS step_1_conversion_time, - prop AS prop - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop AS prop, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, - prop AS prop - FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - prop_basic AS prop_basic, - prop_0 AS prop_0, - prop_1 AS prop_1, - prop, - prop_vals AS prop_vals, - prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - e__pdi.person_id AS aggregation_target, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS 
latest_1, - [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, - if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, - if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, - prop_1 AS prop, - groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY - JOIN prop_vals AS prop - WHERE ifNull(notEquals(prop, []), isNotNull(prop) - or isNotNull([])))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps, - prop - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - GROUP BY prop) - GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- # name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events ''' SELECT sum(step_1) AS step_1, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr new file mode 100644 index 0000000000000..38a6cd3b37d4f --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr @@ -0,0 +1,442 @@ +# serializer version: 1 +# name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + step_0_matching_events AS matching_events + FROM + (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, + groupArray(10)(step_1_matching_event) AS step_1_matching_events, + groupArray(10)(step_2_matching_event) AS step_2_matching_events, + groupArray(10)(final_matching_event) AS final_matching_events, + aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, 
step_2_matching_event))) AS final_matching_event, + aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + latest_2 AS latest_2, + uuid_2 AS uuid_2, + `$session_id_2` AS `$session_id_2`, + `$window_id_2` AS `$window_id_2`, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, + tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + min(uuid_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS uuid_1, + min(`$session_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$session_id_1`, + min(`$window_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + min(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS uuid_2, + min(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS `$session_id_2`, + min(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS `$window_id_2` + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + e.uuid AS 
uuid, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, + if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, + if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, + if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, + if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, + if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, + if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [1, 2, 3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.2 + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + 
FROM + (SELECT aggregation_target AS actor_id, + step_1_matching_events AS matching_events + FROM + (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, + groupArray(10)(step_1_matching_event) AS step_1_matching_events, + groupArray(10)(step_2_matching_event) AS step_2_matching_events, + groupArray(10)(final_matching_event) AS final_matching_events, + aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event, + aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + latest_2 AS latest_2, + uuid_2 AS uuid_2, + `$session_id_2` AS `$session_id_2`, + `$window_id_2` AS `$window_id_2`, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, + tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS 
latest_1, + min(uuid_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS uuid_1, + min(`$session_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$session_id_1`, + min(`$window_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + min(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS uuid_2, + min(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS `$session_id_2`, + min(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS `$window_id_2` + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + e.uuid AS uuid, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, + if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, + if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, + if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, + if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, + if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, + if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [2, 3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 
SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.3 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s2'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.4 + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + step_1_matching_events AS matching_events + FROM + (SELECT groupArray(10)(step_0_matching_event) AS step_0_matching_events, + groupArray(10)(step_1_matching_event) AS step_1_matching_events, + groupArray(10)(step_2_matching_event) AS step_2_matching_events, + groupArray(10)(final_matching_event) AS final_matching_events, + aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event, + aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + latest_2 AS latest_2, + uuid_2 AS uuid_2, + `$session_id_2` AS `$session_id_2`, + `$window_id_2` AS `$window_id_2`, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, 
plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, + tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + min(uuid_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS uuid_1, + min(`$session_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$session_id_1`, + min(`$window_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + min(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS uuid_2, + min(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS `$session_id_2`, + min(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS `$window_id_2` + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + e.uuid AS uuid, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, + if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, + if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, + if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, + if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, + if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, + if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id 
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(equals(steps, 2), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.5 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-01-02 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s2'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr new file mode 100644 index 0000000000000..837c953bc93f5 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr @@ -0,0 +1,2066 @@ +# serializer version: 1 +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY 
aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS 
step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 
23:59:59.999999', 6, 'UTC'))))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([])))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, ['Other']) AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_2 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS 
prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, 
plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS 
step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS 
latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + if(isNaN(avgArrayOrNull(step_1_conversion_time_array) AS inter_1_conversion), NULL, inter_1_conversion) AS step_1_average_conversion_time, + if(isNaN(avgArrayOrNull(step_2_conversion_time_array) AS inter_2_conversion), NULL, inter_2_conversion) AS step_2_average_conversion_time, + if(isNaN(medianArrayOrNull(step_1_conversion_time_array) AS inter_1_median), NULL, inter_1_median) AS step_1_median_conversion_time, + if(isNaN(medianArrayOrNull(step_2_conversion_time_array) AS inter_2_median), NULL, inter_2_median) AS step_2_median_conversion_time, + if(ifNull(less(row_number, 26), 0), prop, 'Other') AS final_prop + FROM + (SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, + countIf(ifNull(equals(steps, 2), 0)) AS step_2, + countIf(ifNull(equals(steps, 3), 0)) AS step_3, + groupArray(step_1_conversion_time) AS step_1_conversion_time_array, + groupArray(step_2_conversion_time) AS step_2_conversion_time_array, + prop AS prop, + row_number() OVER ( + ORDER BY step_3 DESC) AS row_number + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + prop AS prop, + min(step_1_conversion_time) AS step_1_conversion_time, + min(step_2_conversion_time) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + prop AS prop, + max(steps) OVER (PARTITION BY aggregation_target, + prop) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time, + prop AS prop + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop AS prop, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 
'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2, + prop AS prop + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps, + prop + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + GROUP BY prop) + GROUP BY final_prop + LIMIT 100 SETTINGS 
readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= 
toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as 
step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', 
toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: BaseTestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) 
step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS 
final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT 
countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'step_1', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + 
groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> 
if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT 
sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, 
+ format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + 
groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, 
timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), 
toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT 
aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND 
toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner, + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time, + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (1=1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND 
arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr new file mode 100644 index 0000000000000..1643acebc69ce --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr @@ -0,0 +1,1515 @@ +# serializer version: 1 +# name: TestFunnelTimeToConvert.test_auto_bin_count_single_step + ''' + SELECT fill.bin_from_seconds AS bin_from_seconds, + results.person_count AS person_count, + + (SELECT histogram_params.average_conversion_time AS average_conversion_time + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, + ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, + round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, + count() AS sample_count, + least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, + ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, + if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + 
latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params) AS average_conversion_time + FROM + (SELECT plus( + (SELECT histogram_params.from_seconds AS from_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS 
latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), 
multiply(floor(divide(minus(step_runs.step_1_average_conversion_time_inner, + (SELECT histogram_params.from_seconds AS from_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 
1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params)), + (SELECT histogram_params.bin_width_seconds AS bin_width_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_1, latest_2), 
NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))), + (SELECT histogram_params.bin_width_seconds AS bin_width_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, 
median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step 
two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds, + count() AS person_count + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), 
timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + GROUP BY bin_from_seconds) AS results + RIGHT OUTER JOIN + (SELECT plus( + (SELECT histogram_params.from_seconds AS from_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS 
latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(numbers.number, + (SELECT histogram_params.bin_width_seconds AS bin_width_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER 
(PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY 
aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds + FROM numbers(plus(ifNull( + (SELECT histogram_params.bin_count AS bin_count + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(7))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS 
timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), 0), 1)) AS numbers) AS fill ON equals(results.bin_from_seconds, fill.bin_from_seconds) + ORDER BY fill.bin_from_seconds ASC + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTimeToConvert.test_basic_strict + ''' + SELECT fill.bin_from_seconds AS bin_from_seconds, + results.person_count AS person_count, + + (SELECT histogram_params.average_conversion_time AS average_conversion_time + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, + ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, + round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, + count() AS sample_count, + least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, + ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, + if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS 
step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params) AS average_conversion_time + FROM + (SELECT plus( + (SELECT histogram_params.from_seconds AS from_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, 
avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(floor(divide(minus(step_runs.step_1_average_conversion_time_inner, + (SELECT histogram_params.from_seconds AS from_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS 
from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 
00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params)), + (SELECT histogram_params.bin_width_seconds AS bin_width_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT 
argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))), + (SELECT histogram_params.bin_width_seconds AS bin_width_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY 
timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds, + count() AS person_count + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY 
aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + GROUP BY bin_from_seconds) AS results + RIGHT OUTER JOIN + (SELECT plus( + (SELECT histogram_params.from_seconds AS from_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS 
step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(numbers.number, + (SELECT histogram_params.bin_width_seconds AS bin_width_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, 
latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds + FROM numbers(plus(ifNull( + (SELECT histogram_params.bin_count AS bin_count + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS 
step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), 0), 1)) AS numbers) AS fill ON equals(results.bin_from_seconds, fill.bin_from_seconds) + ORDER BY fill.bin_from_seconds ASC + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + 
max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTimeToConvert.test_basic_unordered + ''' + SELECT fill.bin_from_seconds AS bin_from_seconds, + results.person_count AS person_count, + + (SELECT histogram_params.average_conversion_time AS average_conversion_time + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, + ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, + round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, + count() AS sample_count, + least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, + ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, + if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + arraySort([latest_0, latest_1, latest_2]) AS event_times, + arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, + arraySort([latest_0, latest_1, latest_2]) AS conversion_times, + if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, + if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS 
latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + arraySort([latest_0, latest_1, latest_2]) AS event_times, + arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, + arraySort([latest_0, latest_1, latest_2]) AS conversion_times, + if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, + if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step two'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step three'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step one'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 
'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + arraySort([latest_0, latest_1, latest_2]) AS event_times, + arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, + arraySort([latest_0, latest_1, latest_2]) AS conversion_times, + if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, + if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step three'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step one'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step two'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS 
histogram_params) AS average_conversion_time + FROM + (SELECT plus( + (SELECT histogram_params.from_seconds AS from_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY 
person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step two'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step three'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step one'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + 
WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step three'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step one'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step two'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(floor(divide(minus(step_runs.step_1_average_conversion_time_inner, + (SELECT histogram_params.from_seconds AS from_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, 
round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), 
toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step two'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step three'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step one'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, 
arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step three'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step one'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step two'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params)), + (SELECT histogram_params.bin_width_seconds AS bin_width_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target 
AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS 
timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step two'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step three'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step one'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), 
ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step three'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step one'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step two'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))), + (SELECT histogram_params.bin_width_seconds AS bin_width_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER 
(PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), 
ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step two'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step three'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step one'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], 
conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step three'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step one'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step two'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds, + count() AS person_count + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + arraySort([latest_0, latest_1, latest_2]) AS event_times, + arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, + arraySort([latest_0, latest_1, latest_2]) AS conversion_times, + if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), 
dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, + if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + arraySort([latest_0, latest_1, latest_2]) AS event_times, + arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, + arraySort([latest_0, latest_1, latest_2]) AS conversion_times, + if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, + if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS 
BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step two'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step three'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step one'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + arraySort([latest_0, latest_1, latest_2]) AS event_times, + arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, + arraySort([latest_0, latest_1, latest_2]) AS conversion_times, + if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, + if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step three'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step one'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 
if(equals(e.event, 'step two'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + GROUP BY bin_from_seconds) AS results + RIGHT OUTER JOIN + (SELECT plus( + (SELECT histogram_params.from_seconds AS from_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), 
NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step two'), 1, 0) AS step_0, 
if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step three'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step one'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step three'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step one'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step two'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING 
ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(numbers.number, + (SELECT histogram_params.bin_width_seconds AS bin_width_seconds + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY 
aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step two'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step three'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step one'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, 
person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step three'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step one'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step two'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 
23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds + FROM numbers(plus(ifNull( + (SELECT histogram_params.bin_count AS bin_count + FROM + (SELECT ifNull(floor(min(step_runs.step_1_average_conversion_time_inner)), 0) AS from_seconds, ifNull(ceil(max(step_runs.step_1_average_conversion_time_inner)), 1) AS to_seconds, round(avg(step_runs.step_1_average_conversion_time_inner), 2) AS average_conversion_time, count() AS sample_count, least(60, greatest(1, ceil(cbrt(ifNull(sample_count, 0))))) AS bin_count, ceil(divide(minus(to_seconds, from_seconds), bin_count)) AS bin_width_seconds_raw, if(ifNull(greater(bin_width_seconds_raw, 0), 0), bin_width_seconds_raw, 60) AS bin_width_seconds + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, median(step_1_conversion_time) AS step_1_median_conversion_time_inner, median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, steps AS steps, max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, step_1_conversion_time AS step_1_conversion_time, step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(ifNull(equals(step_1, 
1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step three'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step two'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step three'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step one'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, 
e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0) + UNION ALL SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, latest_1 AS latest_1, step_2 AS step_2, latest_2 AS latest_2, arraySort([latest_0, latest_1, latest_2]) AS event_times, arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), if(and(ifNull(less(latest_0, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 1, 0), 1]) AS steps, arraySort([latest_0, latest_1, latest_2]) AS conversion_times, if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(toTimeZone(conversion_times[1], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time, if(and(isNotNull(conversion_times[3]), ifNull(lessOrEquals(conversion_times[3], plus(toTimeZone(conversion_times[2], 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', conversion_times[2], conversion_times[3]), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, timestamp AS timestamp, step_0 AS step_0, latest_0 AS latest_0, step_1 AS step_1, min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, step_2 AS step_2, min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, e__pdi.person_id AS aggregation_target, if(equals(e.event, 'step three'), 1, 0) AS step_0, if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, if(equals(e.event, 'step one'), 1, 0) AS step_1, if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, if(equals(e.event, 'step two'), 1, 0) AS step_2, if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs + WHERE 
isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), 0), 1)) AS numbers) AS fill ON equals(results.bin_from_seconds, fill.bin_from_seconds) + ORDER BY fill.bin_from_seconds ASC + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr new file mode 100644 index 0000000000000..cfbc8d8a68362 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr @@ -0,0 +1,725 @@ +# serializer version: 1 +# name: BaseTestFunnelTrends.test_timezones_trends + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + 
timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_timezones_trends.1 + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS 
reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 
'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_week_interval + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfWeek(timestamp, 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + 
if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_week_interval.1 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + 
step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'step one', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'step two', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'step three', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2 + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) )))) + WHERE step_0 = 1 ) + WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00' + GROUP BY aggregation_target, + entrance_period_start) + WHERE steps_completed >= 3 + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelTrends.test_timezones_trends + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), 
toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + 
max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrends.test_timezones_trends.1 + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, + e__pdi.person_id AS aggregation_target, 
+ if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrends.test_week_interval + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfWeek(timestamp, 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, 
if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrends.test_week_interval.1 + ''' + + SELECT aggregation_target 
AS actor_id + FROM + (SELECT aggregation_target, + toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'step one', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'step two', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'step three', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2 + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) )))) + WHERE step_0 = 1 ) + WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00' + GROUP BY aggregation_target, + entrance_period_start) + WHERE steps_completed >= 3 + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr new file mode 100644 index 0000000000000..db29653790237 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr @@ -0,0 +1,520 @@ +# serializer version: 1 +# name: TestFunnelTrendsPersons.test_funnel_trend_persons_returns_recordings + ''' + SELECT persons.id, + persons.id AS 
id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + step_1_matching_events AS matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed, + groupArray(10)(step_0_matching_event) AS step_0_matching_events, + groupArray(10)(step_1_matching_event) AS step_1_matching_events, + groupArray(10)(step_2_matching_event) AS step_2_matching_events, + groupArray(10)(final_matching_event) AS final_matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + latest_2 AS latest_2, + uuid_2 AS uuid_2, + `$session_id_2` AS `$session_id_2`, + `$window_id_2` AS `$window_id_2`, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, + tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS 
step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + last_value(uuid_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, + last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, + last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + e.uuid AS uuid, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, + if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, + if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, + if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, + if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, + if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, + if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE 
and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + WHERE ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0) + GROUP BY aggregation_target, + entrance_period_start) + WHERE ifNull(greaterOrEquals(steps_completed, 2), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsPersons.test_funnel_trend_persons_returns_recordings.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1b'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_drop_off + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + final_matching_events AS matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed, + groupArray(10)(step_0_matching_event) AS step_0_matching_events, + groupArray(10)(step_1_matching_event) AS step_1_matching_events, + groupArray(10)(step_2_matching_event) AS step_2_matching_events, + groupArray(10)(final_matching_event) AS final_matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + latest_2 AS latest_2, + uuid_2 AS uuid_2, + `$session_id_2` AS `$session_id_2`, + `$window_id_2` AS `$window_id_2`, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, 
if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, + tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + last_value(uuid_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, + last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, + last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, + step_2 AS step_2, + 
min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + e.uuid AS uuid, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, + if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, + if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, + if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, + if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, + if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, + if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + WHERE ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0) + GROUP BY aggregation_target, + entrance_period_start) + WHERE and(ifNull(greaterOrEquals(steps_completed, 1), 0), ifNull(less(steps_completed, 3), 0)) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + 
max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_drop_off.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1a'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_no_to_step + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + final_matching_events AS matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed, + groupArray(10)(step_0_matching_event) AS step_0_matching_events, + groupArray(10)(step_1_matching_event) AS step_1_matching_events, + groupArray(10)(step_2_matching_event) AS step_2_matching_events, + groupArray(10)(final_matching_event) AS final_matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + latest_2 AS latest_2, + uuid_2 AS uuid_2, + `$session_id_2` AS `$session_id_2`, + `$window_id_2` AS `$window_id_2`, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, + tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + 
`$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + last_value(uuid_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, + last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, + last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + e.uuid AS uuid, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, + if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, + if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, + if(ifNull(equals(step_1, 1), 0), 
e.`$session_id`, NULL) AS `$session_id_1`, + if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, + if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, + if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + WHERE ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0) + GROUP BY aggregation_target, + entrance_period_start) + WHERE ifNull(greaterOrEquals(steps_completed, 3), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_no_to_step.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1c'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr new file mode 100644 index 0000000000000..0c52cf349a36b --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr @@ -0,0 +1,602 @@ +# serializer version: 1 +# name: BaseTestFunnelTrends.test_timezones_trends + ''' + SELECT fill.entrance_period_start AS entrance_period_start, 
+ data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 
if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_timezones_trends.1 + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), 
toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_week_interval + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + data.reached_from_step_count AS reached_from_step_count, + data.reached_to_step_count AS reached_to_step_count, + if(ifNull(greater(data.reached_from_step_count, 0), 0), 
round(multiply(divide(data.reached_to_step_count, data.reached_from_step_count), 100), 2), 0) AS conversion_rate + FROM + (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill + LEFT OUTER JOIN + (SELECT entrance_period_start AS entrance_period_start, + countIf(ifNull(greaterOrEquals(steps_completed, 1), 0)) AS reached_from_step_count, + countIf(ifNull(greaterOrEquals(steps_completed, 3), 0)) AS reached_to_step_count + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfWeek(timestamp, 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT 
argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + GROUP BY aggregation_target, + entrance_period_start) + GROUP BY entrance_period_start) AS data ON equals(data.entrance_period_start, fill.entrance_period_start) + ORDER BY fill.entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: BaseTestFunnelTrends.test_week_interval.1 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'step one', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'step two', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'step three', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2 + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM 
events + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) )))) + WHERE step_0 = 1 ) + WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00' + GROUP BY aggregation_target, + entrance_period_start) + WHERE steps_completed >= 3 + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelTrendsUDF.test_timezones_trends + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + countIf(ifNull(notEquals(data.success_bool, 0), 1)) AS reached_from_step_count, + countIf(ifNull(equals(data.success_bool, 1), 0)) AS reached_to_step_count, + if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, + data.breakdown AS prop + FROM + (SELECT arrayJoin(aggregate_funnel_array_trends(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfDay(timestamp), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + toTimeZone(af_tuple.1, 'UTC') AS entrance_period_start, + af_tuple.2 AS success_bool, + af_tuple.3 AS breakdown + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target SETTINGS date_time_output_format='iso', + date_time_input_format='best_effort') AS data + RIGHT OUTER JOIN + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toIntervalDay(period_offsets.number)) AS 
entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')))), 1)) AS period_offsets) AS fill ON equals(data.entrance_period_start, fill.entrance_period_start) + GROUP BY entrance_period_start, + data.breakdown + ORDER BY entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsUDF.test_timezones_trends.1 + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + countIf(ifNull(notEquals(data.success_bool, 0), 1)) AS reached_from_step_count, + countIf(ifNull(equals(data.success_bool, 1), 0)) AS reached_to_step_count, + if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, + data.breakdown AS prop + FROM + (SELECT arrayJoin(aggregate_funnel_array_trends(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfDay(timestamp), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + toTimeZone(af_tuple.1, 'US/Pacific') AS entrance_period_start, + af_tuple.2 AS success_bool, + af_tuple.3 AS breakdown + FROM + (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-04-30 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target SETTINGS date_time_output_format='iso', + date_time_input_format='best_effort') AS data + RIGHT OUTER JOIN + (SELECT plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), toIntervalDay(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-04-30 00:00:00'), 6, 'US/Pacific'))), 
toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'US/Pacific')))), 1)) AS period_offsets) AS fill ON equals(data.entrance_period_start, fill.entrance_period_start) + GROUP BY entrance_period_start, + data.breakdown + ORDER BY entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsUDF.test_week_interval + ''' + SELECT fill.entrance_period_start AS entrance_period_start, + countIf(ifNull(notEquals(data.success_bool, 0), 1)) AS reached_from_step_count, + countIf(ifNull(equals(data.success_bool, 1), 0)) AS reached_to_step_count, + if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, + data.breakdown AS prop + FROM + (SELECT arrayJoin(aggregate_funnel_array_trends(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfWeek(timestamp, 0), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + toTimeZone(af_tuple.1, 'UTC') AS entrance_period_start, + af_tuple.2 AS success_bool, + af_tuple.3 AS breakdown + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target SETTINGS date_time_output_format='iso', + date_time_input_format='best_effort') AS data + RIGHT OUTER JOIN + (SELECT plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toIntervalWeek(period_offsets.number)) AS entrance_period_start + FROM numbers(plus(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-01 00:00:00'), 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull(('2021-05-07 23:59:59'), 6, 'UTC')), 0)), 1)) AS period_offsets) AS fill ON equals(data.entrance_period_start, fill.entrance_period_start) + 
GROUP BY entrance_period_start, + data.breakdown + ORDER BY entrance_period_start ASC + LIMIT 1000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsUDF.test_week_interval.1 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + toStartOfWeek(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC'), 0) AS entrance_period_start, + max(steps) AS steps_completed + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'step one', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'step two', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'step three', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2 + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['step one', 'step three', 'step two'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2021-05-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2021-05-07 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) )))) + WHERE step_0 = 1 ) + WHERE toDateTime(entrance_period_start) = '2021-04-25 00:00:00' + GROUP BY aggregation_target, + entrance_period_start) + WHERE steps_completed >= 3 + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- diff --git 
a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr index 0dc9b42761ee4..e8487d4a42a9e 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr @@ -1,4 +1,240 @@ # serializer version: 1 +# name: TestFOSSFunnelUDF.test_funnel_conversion_window_seconds + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(3, 15, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + 
allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_conversion_window_seconds.1 + ''' + SELECT persons.id, + persons.id AS id, + persons.created_at AS created_at, + 1 + FROM + (SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, + avg(step_2_conversion_time) AS step_2_average_conversion_time_inner, + median(step_1_conversion_time) AS step_1_median_conversion_time_inner, + median(step_2_conversion_time) AS step_2_median_conversion_time_inner + FROM + (SELECT aggregation_target AS aggregation_target, + steps AS steps, + max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, + step_1_conversion_time AS step_1_conversion_time, + step_2_conversion_time AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalSecond(15))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2 + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2 + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + 
if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2 + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0))) + GROUP BY aggregation_target, + steps + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) + WHERE ifNull(in(steps, [2, 3]), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, + person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.created_at DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_events_with_person_on_events_v2 + ''' + + SELECT DISTINCT person_id + FROM events + WHERE team_id = 2 + AND distinct_id = 'stopped_after_pay' + ''' +# --- +# name: TestFOSSFunnelUDF.test_funnel_events_with_person_on_events_v2.1 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + breakdown AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), [], arrayFilter(x -> ifNull(notEquals(x, 
0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + if(equals(e.event, 'user signed up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, '$autocapture'), match(e.elements_chain, '(^|;)button(\\.|$|;|:)'), arrayExists(x -> ifNull(equals(x, 'Pay $10'), 0), e.elements_chain_texts)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(and(equals(e.event, '$autocapture'), match(e.elements_chain, '(^|;)a(\\.|$|;|:)'), equals(e.elements_chain_href, '/movie')), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2 + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 2) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2011-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2012-01-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$autocapture', 'user signed up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- # name: TestFOSSFunnelUDF.test_funnel_with_precalculated_cohort_step_filter ''' @@ -619,3 +855,1033 @@ allow_experimental_analyzer=1 ''' # --- +# name: TestFunnelBreakdownUDF.test_funnel_breakdown_correct_breakdown_props_are_chosen + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> 
ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelBreakdownUDF.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'step_1', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, 
groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop_0 AS prop_0, + prop_1 AS prop_1, + prop, + prop_vals AS prop_vals, + prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY + JOIN prop_vals AS prop + WHERE ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelBreakdownUDF.test_funnel_step_multiple_breakdown_snapshot + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + rowNumberInBlock() AS row_number, + 
if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'buy'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + 0 AS exclusion_0, + 0 AS exclusion_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), ''), ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_aggregate_by_groups_breakdown_group_person_on_events + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS 
step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + 
sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e.`$group_0` AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY 
final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group + ''' + SELECT sum(step_1) AS step_1, + sum(step_2) AS step_2, + sum(step_3) AS step_3, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_1_conversion_times)])[1] AS step_1_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [avgArrayOrNull(step_2_conversion_times)])[1] AS step_2_average_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_1_conversion_times)])[1] AS step_1_median_conversion_time, + arrayMap(x -> if(isNaN(x), NULL, x), [medianArrayOrNull(step_2_conversion_times)])[1] AS step_2_median_conversion_time, + groupArray(row_number) AS row_number, + final_prop AS final_prop + FROM + (SELECT countIf(ifNull(ifNull(equals(af, 0), 0), 0)) AS step_1, + countIf(ifNull(ifNull(equals(af, 1), 0), 0)) AS step_2, + countIf(ifNull(ifNull(equals(af, 2), 0), 0)) AS step_3, + groupArrayIf(timings[1], ifNull(greater(timings[1], 0), 0)) AS step_1_conversion_times, + groupArrayIf(timings[2], ifNull(greater(timings[2], 0), 0)) AS step_2_conversion_times, + rowNumberInBlock() AS row_number, + if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop + FROM + (SELECT arrayJoin(aggregate_funnel(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple, + af_tuple.1 AS af, + af_tuple.2 AS breakdown, + af_tuple.3 AS timings + FROM + (SELECT timestamp AS timestamp, + aggregation_target AS aggregation_target, + step_0 AS step_0, + latest_0 AS latest_0, + step_1 AS step_1, + latest_1 AS latest_1, + step_2 AS step_2, + latest_2 AS latest_2, + exclusion_0 AS exclusion_0, + exclusion_1 AS exclusion_1, + exclusion_2 AS exclusion_2, + prop_basic AS prop_basic, + prop, + prop_vals AS prop_vals, + prop_vals AS prop + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + e__pdi.person_id AS aggregation_target, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(equals(e.event, 'play movie'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(equals(e.event, 'buy'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + 0 AS exclusion_0, + 0 AS exclusion_1, + 0 AS exclusion_2, + ifNull(toString(e__group_0.properties___industry), '') AS prop_basic, + prop_basic AS prop, + argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + INNER JOIN + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, + person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 
'industry'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___industry, + groups.group_type_index AS index, + groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0)) + GROUP BY groups.group_type_index, + groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) + GROUP BY aggregation_target + HAVING ifNull(greaterOrEquals(af, 0), 0)) + GROUP BY breakdown + ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) + GROUP BY final_prop + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.1 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.2 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, 
latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.3 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.4 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) 
step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND 
arrayFlatten(array(prop)) = arrayFlatten(array('finance')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.5 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.6 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + 
argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [1, 2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.7 + ''' + + SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, + count(*) as count + FROM events e + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestFunnelGroupBreakdownUDF.test_funnel_breakdown_group.8 + ''' + + SELECT aggregation_target AS actor_id + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + avg(step_2_conversion_time) step_2_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner, + median(step_2_conversion_time) step_2_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time, + step_2_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY + AND latest_1 <= latest_2 + AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + if(isNotNull(latest_2) + AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + min(latest_2) over (PARTITION by 
aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + latest_1, + step_2, + if(latest_2 < latest_1, NULL, latest_2) as latest_2 , + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, + step_2, + min(latest_2) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , + if(has(['technology', 'finance'], prop), prop, 'Other') as prop + FROM + (SELECT *, + prop_vals as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = 'sign up', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = 'play movie', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + if(event = 'buy', 1, 0) as step_2, + if(step_2 = 1, timestamp, null) as latest_2, + replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + LEFT JOIN + (SELECT group_key, + argMax(group_properties, _timestamp) AS group_properties_0 + FROM groups + WHERE team_id = 2 + AND group_type_index = 0 + GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key + WHERE team_id = 2 + AND event IN ['buy', 'play movie', 'sign up'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') + AND (step_0 = 1 + OR step_1 = 1 + OR step_2 = 1) ))))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + WHERE steps IN [2, 3] + AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) + ORDER BY aggregation_target + LIMIT 100 + OFFSET 0 SETTINGS max_ast_elements=1000000, + max_expanded_ast_elements=1000000 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel.py b/posthog/hogql_queries/insights/funnels/test/test_funnel.py index 452e2bbb6da9e..bf181fbd99b2f 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel.py @@ -2411,10 +2411,14 @@ def test_advanced_funnel_exclusions_between_steps(self): query = cast(FunnelsQuery, filter_to_query(filters)) results = FunnelsQueryRunner(query=query, team=self.team).calculate().results - self.assertEqual(results[0]["name"], "user signed up") - self.assertEqual(results[0]["count"], 0) - - self.assertEqual(results[4]["count"], 0) + # There should be no events. 
UDF funnels returns an empty array and says "no events" + # Old style funnels returns a count of 0 + try: + self.assertEqual([], results) + except AssertionError: + self.assertEqual(results[0]["name"], "user signed up") + self.assertEqual(results[0]["count"], 0) + self.assertEqual(results[4]["count"], 0) self.assertCountEqual(self._get_actor_ids_at_step(filters, 1), []) diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py index a40649e068a82..7be35d81324d1 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py @@ -179,7 +179,7 @@ def test_strict_breakdown_events_with_multiple_properties(self): self.assertCountEqual(self._get_actor_ids_at_step(filters, 2, ["Safari"]), [people["person2"].uuid]) -class TestStrictFunnelGroupBreakdown( +class BaseTestStrictFunnelGroupBreakdown( ClickhouseTestMixin, funnel_breakdown_group_test_factory( # type: ignore FunnelOrderType.STRICT, @@ -189,7 +189,7 @@ class TestStrictFunnelGroupBreakdown( pass -class TestFunnelStrictStepsConversionTime( +class BaseTestFunnelStrictStepsConversionTime( ClickhouseTestMixin, funnel_conversion_time_test_factory(FunnelOrderType.ORDERED, ClickhouseFunnelStrictActors), # type: ignore ): @@ -197,7 +197,7 @@ class TestFunnelStrictStepsConversionTime( pass -class TestFunnelStrictSteps(ClickhouseTestMixin, APIBaseTest): +class BaseTestFunnelStrictSteps(ClickhouseTestMixin, APIBaseTest): maxDiff = None def _get_actor_ids_at_step(self, filter, funnel_step, breakdown_value=None): @@ -630,3 +630,18 @@ def test_basic_strict_funnel_conversion_times(self): @patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) class TestFunnelStrictStepsBreakdown(BaseTestFunnelStrictStepsBreakdown): pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelStrictSteps(BaseTestFunnelStrictSteps): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestStrictFunnelGroupBreakdown(BaseTestStrictFunnelGroupBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelStrictStepsConversionTime(BaseTestFunnelStrictStepsConversionTime): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py index 6717a26a25664..178e329d3748e 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py @@ -1,8 +1,28 @@ from unittest.mock import Mock, patch -from posthog.hogql_queries.insights.funnels.test.test_funnel_strict import BaseTestFunnelStrictStepsBreakdown +from posthog.hogql_queries.insights.funnels.test.test_funnel_strict import ( + BaseTestFunnelStrictStepsBreakdown, + BaseTestFunnelStrictSteps, + BaseTestStrictFunnelGroupBreakdown, + BaseTestFunnelStrictStepsConversionTime, +) @patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) -class TestFunnelStrictStepsBreakdownUDF(BaseTestFunnelStrictStepsBreakdown): +class TestFunnelStrictStepsBreakdown(BaseTestFunnelStrictStepsBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelStrictSteps(BaseTestFunnelStrictSteps): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class 
TestStrictFunnelGroupBreakdown(BaseTestStrictFunnelGroupBreakdown): + pass + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelStrictStepsConversionTime(BaseTestFunnelStrictStepsConversionTime): pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py index 7b86e305ca89e..55a1f8660d0ca 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py @@ -1,5 +1,6 @@ from datetime import date, datetime, timedelta from typing import cast +from unittest.mock import patch, Mock from zoneinfo import ZoneInfo from freezegun.api import freeze_time @@ -23,7 +24,7 @@ FORMAT_TIME_DAY_END = "%Y-%m-%d 23:59:59" -class TestFunnelTrends(ClickhouseTestMixin, APIBaseTest): +class BaseTestFunnelTrends(ClickhouseTestMixin, APIBaseTest): maxDiff = None def _get_actors_at_step(self, filter, entrance_period_start, drop_off): @@ -130,43 +131,43 @@ def test_only_one_user_reached_one_step(self): [ { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 1, "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, - "conversion_rate": 0, + "conversion_rate": 0.0, "reached_from_step_count": 0, "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, @@ -1611,3 +1612,8 @@ def test_parses_breakdown_correctly(self): results = FunnelsQueryRunner(query=query, team=self.team).calculate().results self.assertEqual(len(results), 1) + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=False)) +class TestFunnelTrends(BaseTestFunnelTrends): + pass diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py new file mode 100644 index 0000000000000..6965222b749f5 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py @@ -0,0 +1,8 @@ +from unittest.mock import patch, Mock + +from posthog.hogql_queries.insights.funnels.test.test_funnel_trends import BaseTestFunnelTrends + + +@patch("posthoganalytics.feature_enabled", new=Mock(return_value=True)) +class TestFunnelTrendsUDF(BaseTestFunnelTrends): + pass diff --git a/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py b/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py index 9a93bf00c9630..50791bc69ff8a 100644 --- 
a/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py +++ b/posthog/hogql_queries/insights/test/test_stickiness_query_runner.py @@ -28,6 +28,7 @@ PropertyGroupFilter, PropertyOperator, RecordingPropertyFilter, + LogEntryPropertyFilter, SessionPropertyFilter, StickinessFilter, StickinessQuery, @@ -60,6 +61,7 @@ class SeriesTestData: SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, diff --git a/posthog/hogql_queries/insights/trends/breakdown.py b/posthog/hogql_queries/insights/trends/breakdown.py index 24d281d6b2bf2..b1fba204f44b7 100644 --- a/posthog/hogql_queries/insights/trends/breakdown.py +++ b/posthog/hogql_queries/insights/trends/breakdown.py @@ -22,6 +22,8 @@ InCohortVia, MultipleBreakdownType, TrendsQuery, +) +from posthog.schema import ( Breakdown as BreakdownSchema, ) @@ -169,14 +171,6 @@ def _breakdown_filter(self) -> BreakdownFilter: """ return cast(BreakdownFilter, self.query.breakdownFilter) - @property - def hide_other_aggregation(self) -> bool: - return ( - self.query.breakdownFilter.breakdown_hide_other_aggregation or False - if self.query.breakdownFilter - else False - ) - def _get_cohort_filter(self, breakdowns: list[str | int] | list[str] | str | int): if breakdowns == "all": return None diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr index e6896e9910b67..69d6e856f5540 100644 --- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr +++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr @@ -3726,12 +3726,12 @@ ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON 
equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e.`$session_id`, breakdown_value) @@ -3772,12 +3772,12 @@ ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e.`$session_id`, breakdown_value) @@ -3818,12 +3818,12 @@ ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 
'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e.`$session_id`, breakdown_value_1) @@ -3864,12 +3864,12 @@ ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), 
assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e.`$session_id`, breakdown_value_1) @@ -4297,12 +4297,12 @@ ifNull(nullIf(toString(e__pdi__person.`properties___$some_prop`), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, @@ -4361,12 +4361,12 @@ ifNull(nullIf(toString(e__pdi__person.`properties___$some_prop`), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 
'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS e__pdi___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, @@ -4438,12 +4438,12 @@ (SELECT any(e__session.`$session_duration`) AS session_duration FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e.`$session_id` ORDER BY 1 DESC) @@ -4463,12 +4463,12 @@ (SELECT any(e__session.`$session_duration`) AS session_duration FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, 
e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e.`$session_id` ORDER BY 1 DESC) @@ -4497,12 +4497,12 @@ toStartOfWeek(toTimeZone(e.timestamp, 'UTC'), 0) AS day_start FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, e.`$session_id`, @@ -4536,12 +4536,12 @@ toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT 
dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, e.`$session_id`, @@ -4584,12 +4584,12 @@ toStartOfWeek(toTimeZone(e.timestamp, 'UTC'), 0) AS day_start FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 
'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, e.`$session_id`, @@ -4639,12 +4639,12 @@ toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, e.`$session_id`, @@ -4694,12 +4694,12 @@ toStartOfWeek(toTimeZone(e.timestamp, 'UTC'), 0) AS day_start FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', 
min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, e.`$session_id`, @@ -4749,12 +4749,12 @@ toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start FROM events AS e SAMPLE 1 LEFT JOIN - (SELECT dateDiff('second', min(toTimeZone(sessions.min_timestamp, 'UTC')), max(toTimeZone(sessions.max_timestamp, 'UTC'))) AS `$session_duration`, - sessions.session_id AS session_id - FROM sessions - WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) - GROUP BY sessions.session_id, - sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id) + (SELECT dateDiff('second', min(toTimeZone(raw_sessions.min_timestamp, 'UTC')), max(toTimeZone(raw_sessions.max_timestamp, 'UTC'))) AS `$session_duration`, + raw_sessions.session_id_v7 AS session_id_v7 + FROM raw_sessions + WHERE and(equals(raw_sessions.team_id, 2), ifNull(greaterOrEquals(plus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(raw_sessions.session_id_v7, 80)), 1000)), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0)) + GROUP BY raw_sessions.session_id_v7, + raw_sessions.session_id_v7) AS e__session ON equals(toUInt128(accurateCastOrNull(e.`$session_id`, 'UUID')), e__session.session_id_v7) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, e.`$session_id`, diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py index af0a35160cf6c..a7de68d01ef9b 
100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py @@ -4792,3 +4792,43 @@ def test_multiple_series_and_multiple_breakdowns_work_with_formula(self): [0, 0, 0, 100, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 100, 0, 0, 0, 0, 0], ] + + def test_trends_with_formula_and_multiple_breakdowns_hide_other_breakdowns(self): + PropertyDefinition.objects.create(team=self.team, name="breakdown_value", property_type="String") + + for value in list(range(30)): + _create_event( + team=self.team, + event="$pageview", + distinct_id=f"person_{value}", + timestamp="2020-01-11T12:00:00Z", + properties={"breakdown_value": str(value)}, + ) + + response = self._run_trends_query( + "2020-01-09", + "2020-01-20", + IntervalType.DAY, + [EventsNode(event="$pageview"), EventsNode(event="$pageview")], + TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH, formula="A+B"), + BreakdownFilter( + breakdowns=[Breakdown(property="breakdown_value", type=MultipleBreakdownType.EVENT)], breakdown_limit=10 + ), + ) + breakdowns = [b for result in response.results for b in result["breakdown_value"]] + self.assertIn(BREAKDOWN_OTHER_STRING_LABEL, breakdowns) + + response = self._run_trends_query( + "2020-01-09", + "2020-01-20", + IntervalType.DAY, + [EventsNode(event="$pageview"), EventsNode(event="$pageview")], + TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH, formula="A+B"), + BreakdownFilter( + breakdowns=[Breakdown(property="breakdown_value", type=MultipleBreakdownType.EVENT)], + breakdown_limit=10, + breakdown_hide_other_aggregation=True, + ), + ) + breakdowns = [b for result in response.results for b in result["breakdown_value"]] + self.assertNotIn(BREAKDOWN_OTHER_STRING_LABEL, breakdowns) diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index c05381cec1863..387fabfcaa7af 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -423,9 +423,9 @@ def run( timings.extend(timing) has_more = False - if self.breakdown_enabled and any(item["label"] == BREAKDOWN_OTHER_STRING_LABEL for item in final_result): + if self.breakdown_enabled and any(self._is_other_breakdown(item["breakdown_value"]) for item in final_result): if self.query.breakdownFilter and self.query.breakdownFilter.breakdown_hide_other_aggregation: - final_result = [item for item in final_result if item["label"] != BREAKDOWN_OTHER_STRING_LABEL] + final_result = [item for item in final_result if not self._is_other_breakdown(item["breakdown_value"])] has_more = True return TrendsQueryResponse( @@ -802,6 +802,7 @@ def apply_formula( if isinstance(single_or_multiple_breakdown_value, tuple) else single_or_multiple_breakdown_value ) + any_result: Optional[dict[str, Any]] = None for result in results: matching_result = [item for item in result if itemgetter(*keys)(item) == breakdown_value] @@ -1059,3 +1060,10 @@ def _get_breakdown_items( res_breakdown.append(item) return res_breakdown + + def _is_other_breakdown(self, breakdown: BreakdownItem | list[BreakdownItem]) -> bool: + return ( + breakdown == BREAKDOWN_OTHER_STRING_LABEL + or isinstance(breakdown, list) + and BREAKDOWN_OTHER_STRING_LABEL in breakdown + ) diff --git a/posthog/hogql_queries/insights/utils/utils.py b/posthog/hogql_queries/insights/utils/utils.py index 747d7e2b6ca5a..15689aba7927e 100644 --- 
a/posthog/hogql_queries/insights/utils/utils.py +++ b/posthog/hogql_queries/insights/utils/utils.py @@ -10,3 +10,8 @@ def get_start_of_interval_hogql(interval: str, *, team: Team, source: Optional[a if trunc_func == "toStartOfWeek": trunc_func_args.append(ast.Constant(value=int((WeekStartDay(team.week_start_day or 0)).clickhouse_mode))) return ast.Call(name=trunc_func, args=trunc_func_args) + + +def get_start_of_interval_hogql_str(interval: str, *, team: Team, source: str) -> str: + trunc_func = get_trunc_func_ch(interval) + return f"{trunc_func}({source}{f', {int((WeekStartDay(team.week_start_day or 0)).clickhouse_mode)}' if trunc_func == 'toStartOfWeek' else ''})" diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 55e2426b46ccf..8edc5449c1583 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -542,7 +542,7 @@ def run( tag_queries(cache_key=cache_key) tag_queries(sentry_trace=get_traceparent()) set_tag("cache_key", cache_key) - set_tag("query_type", self.query.__class__.__name__) + set_tag("query_type", getattr(self.query, "kind", "Other")) if insight_id: tag_queries(insight_id=insight_id) set_tag("insight_id", str(insight_id)) diff --git a/posthog/management/commands/backfill_raw_sessions_table.py b/posthog/management/commands/backfill_raw_sessions_table.py index 0a35746e99685..c28bd5153817a 100644 --- a/posthog/management/commands/backfill_raw_sessions_table.py +++ b/posthog/management/commands/backfill_raw_sessions_table.py @@ -20,7 +20,7 @@ TARGET_TABLE = "raw_sessions" SETTINGS = { - "max_execution_time": 3600 # 1 hour + "max_execution_time": 7200 # 2 hours } diff --git a/posthog/management/commands/test/test_migrate_action_webhooks.py b/posthog/management/commands/test/test_migrate_action_webhooks.py index 52e625a0610d5..45f6a393b2809 100644 --- a/posthog/management/commands/test/test_migrate_action_webhooks.py +++ b/posthog/management/commands/test/test_migrate_action_webhooks.py @@ -1,4 +1,6 @@ from inline_snapshot import snapshot + +from hogvm.python.operation import HOGQL_BYTECODE_VERSION from posthog.cdp.templates.webhook.template_webhook import template as template_webhook from posthog.management.commands.migrate_action_webhooks import migrate_action_webhooks from posthog.models.action.action import Action @@ -54,7 +56,7 @@ def test_migrates_base_action_config_correctly(self): assert hog_function.name == f"Webhook for action {self.action.id} (Test Action)" assert hog_function.filters == { "actions": [{"id": f"{self.action.id}", "name": "Test Action", "type": "actions", "order": 0}], - "bytecode": ["_h", 29, 3, 1, 4, 1], + "bytecode": ["_H", HOGQL_BYTECODE_VERSION, 29, 3, 1, 4, 1], } assert hog_function.hog == template_webhook.hog assert hog_function.inputs_schema == template_webhook.inputs_schema diff --git a/posthog/management/commands/test/test_migrate_hooks.py b/posthog/management/commands/test/test_migrate_hooks.py index 90910c915c469..62841f0785c2f 100644 --- a/posthog/management/commands/test/test_migrate_hooks.py +++ b/posthog/management/commands/test/test_migrate_hooks.py @@ -1,4 +1,5 @@ from ee.models.hook import Hook +from hogvm.python.operation import HOGQL_BYTECODE_VERSION from posthog.cdp.templates.zapier.template_zapier import template as template_zapier from posthog.management.commands.migrate_hooks import migrate_hooks from posthog.models.action.action import Action @@ -54,7 +55,7 @@ def test_migrates_hook_correctly(self): assert hog_function.name == f"Zapier webhook 
for action {self.action.id}" assert hog_function.filters == { "actions": [{"id": f"{self.action.id}", "name": "", "type": "actions", "order": 0}], - "bytecode": ["_h", 29, 3, 1, 4, 1], + "bytecode": ["_H", HOGQL_BYTECODE_VERSION, 29, 3, 1, 4, 1], } assert hog_function.hog == template_zapier.hog assert hog_function.inputs_schema == template_zapier.inputs_schema diff --git a/posthog/management/commands/test_migrations_are_safe.py b/posthog/management/commands/test_migrations_are_safe.py index 0a921cc98d2da..41ef0df6f90db 100644 --- a/posthog/management/commands/test_migrations_are_safe.py +++ b/posthog/management/commands/test_migrations_are_safe.py @@ -42,6 +42,8 @@ def validate_migration_sql(sql) -> bool: and "CREATE TABLE" not in operation_sql and "ADD CONSTRAINT" not in operation_sql and "-- not-null-ignore" not in operation_sql + # Ignore for brand-new tables + and (table_being_altered not in tables_created_so_far or table_being_altered not in new_tables) ): print( f"\n\n\033[91mFound a non-null field or default added to an existing model. This will lock up the table while migrating. Please add 'null=True, blank=True' to the field.\nSource: `{operation_sql}`" diff --git a/posthog/migrations/0458_alter_insightviewed_team_alter_insightviewed_user.py b/posthog/migrations/0458_alter_insightviewed_team_alter_insightviewed_user.py new file mode 100644 index 0000000000000..6e10646775fd0 --- /dev/null +++ b/posthog/migrations/0458_alter_insightviewed_team_alter_insightviewed_user.py @@ -0,0 +1,58 @@ +# Generated by Django 4.2.14 on 2024-08-21 07:01 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + atomic = False # Allow concurrent operations + + dependencies = [ + ("posthog", "0457_datawarehousejoin_deleted_at_and_more"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + state_operations=[ + migrations.AlterField( + model_name="insightviewed", + name="team", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.team", + ), + ), + migrations.AlterField( + model_name="insightviewed", + name="user", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + ], + database_operations=[ + migrations.RunSQL( + """ + ALTER TABLE "posthog_insightviewed" ALTER COLUMN "team_id" DROP NOT NULL; + """, + reverse_sql=""" + ALTER TABLE "posthog_insightviewed" ALTER COLUMN "team_id" SET NOT NULL; + """, + ), + migrations.RunSQL( + """ + ALTER TABLE "posthog_insightviewed" ALTER COLUMN "user_id" DROP NOT NULL; + """, + reverse_sql=""" + ALTER TABLE "posthog_insightviewed" ALTER COLUMN "user_id" SET NOT NULL; + """, + ), + ], + ), + ] diff --git a/posthog/migrations/0459_convert_personsnode_insights_to_actorsquery.py b/posthog/migrations/0459_convert_personsnode_insights_to_actorsquery.py new file mode 100644 index 0000000000000..51704207b7cff --- /dev/null +++ b/posthog/migrations/0459_convert_personsnode_insights_to_actorsquery.py @@ -0,0 +1,49 @@ +# Generated by Django 4.2.14 on 2024-08-22 14:00 +from typing import cast + +from django.db import migrations + +from posthog.schema import ActorsQuery, CohortPropertyFilter, DataTableNode, NodeKind, PersonsNode + + +def convert_insights(apps, schema_editor): + Insight = apps.get_model("posthog", "Insight") + + insights = Insight.objects.filter(query__kind=NodeKind.DATA_TABLE_NODE, 
query__source__kind=NodeKind.PERSONS_NODE) + + for insight in insights.iterator(chunk_size=100): + try: + query_dict = insight.query.copy() + query = DataTableNode(**query_dict) + node = cast(PersonsNode, query.source) + + properties = node.properties or [] + if node.cohort is not None: + properties.append(CohortPropertyFilter(key="id", value=node.cohort)) + + del query_dict["source"] + + updated_query = DataTableNode( + **query_dict, + source=ActorsQuery( + search=node.search, + properties=properties, + fixedProperties=node.fixedProperties, + limit=node.limit, + offset=node.offset, + ), + ) + insight.query = updated_query.model_dump(exclude_none=True) + insight.save() + except: + pass + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0458_alter_insightviewed_team_alter_insightviewed_user"), + ] + + operations = [ + migrations.RunPython(convert_insights, migrations.RunPython.noop, elidable=True), + ] diff --git a/posthog/migrations/0460_alertconfiguration_threshold_alertsubscription_and_more.py b/posthog/migrations/0460_alertconfiguration_threshold_alertsubscription_and_more.py new file mode 100644 index 0000000000000..f53046b41046f --- /dev/null +++ b/posthog/migrations/0460_alertconfiguration_threshold_alertsubscription_and_more.py @@ -0,0 +1,161 @@ +# Generated by Django 4.2.14 on 2024-08-26 10:25 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import django.db.models.expressions +import posthog.models.utils + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0459_convert_personsnode_insights_to_actorsquery"), + ] + + operations = [ + migrations.CreateModel( + name="AlertConfiguration", + fields=[ + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ("name", models.CharField(blank=True, max_length=255)), + ("condition", models.JSONField(default=dict)), + ( + "state", + models.CharField( + choices=[("firing", "Firing"), ("inactive", "Inactive")], default="inactive", max_length=10 + ), + ), + ("enabled", models.BooleanField(default=True)), + ("last_notified_at", models.DateTimeField(blank=True, null=True)), + ( + "created_by", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + ), + ), + ("insight", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.insight")), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="Threshold", + fields=[ + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ("name", models.CharField(blank=True, max_length=255)), + ("configuration", models.JSONField(default=dict)), + ( + "created_by", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + ), + ), + ("insight", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.insight")), + ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="AlertSubscription", + fields=[ + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, 
editable=False, primary_key=True, serialize=False + ), + ), + ("subscribed", models.BooleanField(default=True)), + ( + "alert_configuration", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.alertconfiguration"), + ), + ( + "created_by", + models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + ), + ), + ( + "user", + models.ForeignKey( + limit_choices_to={ + "is_active": True, + "organization_id": django.db.models.expressions.OuterRef( + "alert_configuration__team__organization_id" + ), + }, + on_delete=django.db.models.deletion.CASCADE, + related_name="alert_subscriptions", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "unique_together": {("user", "alert_configuration")}, + }, + ), + migrations.AddField( + model_name="alertconfiguration", + name="subscribed_users", + field=models.ManyToManyField( + related_name="alert_configurations", through="posthog.AlertSubscription", to=settings.AUTH_USER_MODEL + ), + ), + migrations.AddField( + model_name="alertconfiguration", + name="team", + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), + migrations.AddField( + model_name="alertconfiguration", + name="threshold", + field=models.ForeignKey( + blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.threshold" + ), + ), + migrations.CreateModel( + name="AlertCheck", + fields=[ + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("calculated_value", models.FloatField(blank=True, null=True)), + ("condition", models.JSONField(default=dict)), + ("targets_notified", models.JSONField(default=dict)), + ("error", models.JSONField(blank=True, null=True)), + ( + "state", + models.CharField( + choices=[("firing", "Firing"), ("not_met", "Not Met")], default="not_met", max_length=10 + ), + ), + ( + "alert_configuration", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.alertconfiguration"), + ), + ], + options={ + "abstract": False, + }, + ), + ] diff --git a/posthog/models/__init__.py b/posthog/models/__init__.py index 36bc77625ad95..674d665288439 100644 --- a/posthog/models/__init__.py +++ b/posthog/models/__init__.py @@ -17,7 +17,7 @@ from .action.action_step import ActionStep from .activity_logging.activity_log import ActivityLog from .activity_logging.notification_viewed import NotificationViewed -from .alert import Alert +from .alert import AlertConfiguration from .annotation import Annotation from .async_deletion import AsyncDeletion, DeletionType from .async_migration import AsyncMigration, AsyncMigrationError, MigrationStatus @@ -75,7 +75,7 @@ from .user_scene_personalisation import UserScenePersonalisation __all__ = [ - "Alert", + "AlertConfiguration", "Action", "ActionStep", "ActivityLog", diff --git a/posthog/models/_deprecated_prompts.py b/posthog/models/_deprecated_prompts.py index 780703017519e..a203f3fb7e2c9 100644 --- a/posthog/models/_deprecated_prompts.py +++ b/posthog/models/_deprecated_prompts.py @@ -5,51 +5,47 @@ # DEPRECATED - DO NOT USE class Prompt(models.Model): - step: models.IntegerField = models.IntegerField() - type: models.CharField = models.CharField(max_length=200) # tooltip, modal, etc - title: models.CharField = models.CharField(max_length=200) - text: models.CharField = models.CharField(max_length=1000) - placement: models.CharField = 
models.CharField( + step = models.IntegerField() + type = models.CharField(max_length=200) # tooltip, modal, etc + title = models.CharField(max_length=200) + text = models.CharField(max_length=1000) + placement = models.CharField( max_length=200, default="top" ) # top, bottom, left, right, top-start, bottom-start, etc. - buttons: models.JSONField = models.JSONField() - reference: models.CharField = models.CharField( + buttons = models.JSONField() + reference = models.CharField( max_length=200, default=None, null=True ) # should match a `data-attr` reference to attach to a component - icon: models.CharField = models.CharField(max_length=200) # sync with iconMap in frontend + icon = models.CharField(max_length=200) # sync with iconMap in frontend # DEPRECATED - DO NOT USE class PromptSequence(models.Model): + key = models.CharField(max_length=200) + type = models.CharField(max_length=200) # we use this to toggle different behaviors in the frontend + path_match: ArrayField = ArrayField(models.CharField(max_length=200)) # wildcard path to match the current URL + path_exclude: ArrayField = ArrayField(models.CharField(max_length=200)) # wildcard path to exclude the current URL + status = models.CharField(max_length=200) # active, inactive, etc + must_have_completed = models.ManyToManyField("self", blank=True, symmetrical=False) + requires_opt_in = models.BooleanField(default=False) + prompts = models.ManyToManyField(Prompt) + autorun = models.BooleanField(default=True) # whether to run this sequence automatically for all users + class Meta: constraints = [ models.UniqueConstraint(fields=["key"], name="unique_prompt_sequence"), ] - key: models.CharField = models.CharField(max_length=200) - type: models.CharField = models.CharField( - max_length=200 - ) # we use this to toggle different behaviors in the frontend - path_match: ArrayField = ArrayField(models.CharField(max_length=200)) # wildcard path to match the current URL - path_exclude: ArrayField = ArrayField(models.CharField(max_length=200)) # wildcard path to exclude the current URL - status: models.CharField = models.CharField(max_length=200) # active, inactive, etc - must_have_completed: models.ManyToManyField = models.ManyToManyField("self", blank=True, symmetrical=False) - requires_opt_in: models.BooleanField = models.BooleanField(default=False) - prompts: models.ManyToManyField = models.ManyToManyField(Prompt) - autorun: models.BooleanField = models.BooleanField( - default=True - ) # whether to run this sequence automatically for all users - # DEPRECATED - DO NOT USE class UserPromptState(models.Model): - class Meta: - constraints = [models.UniqueConstraint(fields=["user", "sequence"], name="unique_user_prompt_state")] + user = models.ForeignKey("User", on_delete=models.CASCADE) + sequence = models.ForeignKey(PromptSequence, on_delete=models.CASCADE) - user: models.ForeignKey = models.ForeignKey("User", on_delete=models.CASCADE) - sequence: models.ForeignKey = models.ForeignKey(PromptSequence, on_delete=models.CASCADE) + last_updated_at = models.DateTimeField(default=timezone.now) + step = models.IntegerField(default=None, null=True) + completed = models.BooleanField(default=False) + dismissed = models.BooleanField(default=False) - last_updated_at: models.DateTimeField = models.DateTimeField(default=timezone.now) - step: models.IntegerField = models.IntegerField(default=None, null=True) - completed: models.BooleanField = models.BooleanField(default=False) - dismissed: models.BooleanField = models.BooleanField(default=False) + class Meta: 
+ constraints = [models.UniqueConstraint(fields=["user", "sequence"], name="unique_user_prompt_state")] diff --git a/posthog/models/action/action.py b/posthog/models/action/action.py index 82c54f44ea167..1b4d6767f461e 100644 --- a/posthog/models/action/action.py +++ b/posthog/models/action/action.py @@ -30,26 +30,26 @@ class ActionStepJSON: class Action(models.Model): - class Meta: - indexes = [models.Index(fields=["team_id", "-updated_at"])] - - name: models.CharField = models.CharField(max_length=400, null=True, blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - description: models.TextField = models.TextField(blank=True, default="") - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) - deleted: models.BooleanField = models.BooleanField(default=False) - events: models.ManyToManyField = models.ManyToManyField("Event", blank=True) - post_to_slack: models.BooleanField = models.BooleanField(default=False) - slack_message_format: models.CharField = models.CharField(default="", max_length=1200, blank=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - bytecode: models.JSONField = models.JSONField(null=True, blank=True) - bytecode_error: models.TextField = models.TextField(blank=True, null=True) - steps_json: models.JSONField = models.JSONField(null=True, blank=True) + name = models.CharField(max_length=400, null=True, blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + description = models.TextField(blank=True, default="") + created_at = models.DateTimeField(auto_now_add=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + deleted = models.BooleanField(default=False) + events = models.ManyToManyField("Event", blank=True) + post_to_slack = models.BooleanField(default=False) + slack_message_format = models.CharField(default="", max_length=1200, blank=True) + updated_at = models.DateTimeField(auto_now=True) + bytecode = models.JSONField(null=True, blank=True) + bytecode_error = models.TextField(blank=True, null=True) + steps_json = models.JSONField(null=True, blank=True) # DEPRECATED: these were used before ClickHouse was our database - is_calculating: models.BooleanField = models.BooleanField(default=False) - last_calculated_at: models.DateTimeField = models.DateTimeField(default=timezone.now, blank=True) + is_calculating = models.BooleanField(default=False) + last_calculated_at = models.DateTimeField(default=timezone.now, blank=True) + + class Meta: + indexes = [models.Index(fields=["team_id", "-updated_at"])] def __str__(self): return self.name diff --git a/posthog/models/action/action_step.py b/posthog/models/action/action_step.py index c2722f87ae07d..975137ba39d16 100644 --- a/posthog/models/action/action_step.py +++ b/posthog/models/action/action_step.py @@ -8,26 +8,26 @@ class ActionStep(models.Model): EXACT = "exact" STRING_MATCHING = [(CONTAINS, CONTAINS), (REGEX, REGEX), (EXACT, EXACT)] - action: models.ForeignKey = models.ForeignKey("Action", related_name="action_steps", on_delete=models.CASCADE) - text: models.CharField = models.CharField(max_length=400, null=True, blank=True) - text_matching: models.CharField = models.CharField( + action = models.ForeignKey("Action", related_name="action_steps", on_delete=models.CASCADE) + text = models.CharField(max_length=400, null=True, blank=True) + 
text_matching = models.CharField( # The implicit default is EXACT - no explicit default to avoid migration woes max_length=400, choices=STRING_MATCHING, null=True, blank=True, ) - href: models.CharField = models.CharField(max_length=65535, null=True, blank=True) - href_matching: models.CharField = models.CharField( + href = models.CharField(max_length=65535, null=True, blank=True) + href_matching = models.CharField( # The implicit default is EXACT - no explicit default to avoid migration woes max_length=400, choices=STRING_MATCHING, null=True, blank=True, ) - selector: models.CharField = models.CharField(max_length=65535, null=True, blank=True) - url: models.CharField = models.CharField(max_length=65535, null=True, blank=True) - url_matching: models.CharField = models.CharField( + selector = models.CharField(max_length=65535, null=True, blank=True) + url = models.CharField(max_length=65535, null=True, blank=True) + url_matching = models.CharField( # This is from before text_matching and href_matching, which is why there's an explicit default of CONTAINS max_length=400, choices=STRING_MATCHING, @@ -35,9 +35,9 @@ class ActionStep(models.Model): null=True, blank=True, ) - event: models.CharField = models.CharField(max_length=400, null=True, blank=True) - properties: models.JSONField = models.JSONField(default=list, null=True, blank=True) + event = models.CharField(max_length=400, null=True, blank=True) + properties = models.JSONField(default=list, null=True, blank=True) # DEPRECATED, DISUSED - name: models.CharField = models.CharField(max_length=400, null=True, blank=True) + name = models.CharField(max_length=400, null=True, blank=True) # DEPRECATED, don't store new data here - tag_name: models.CharField = models.CharField(max_length=400, null=True, blank=True) + tag_name = models.CharField(max_length=400, null=True, blank=True) diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py index 1fb332e2e9e5f..82c13105f1ebb 100644 --- a/posthog/models/activity_logging/activity_log.py +++ b/posthog/models/activity_logging/activity_log.py @@ -129,7 +129,7 @@ class Meta: # e.g. 
FeatureFlags - this will often be the name of a model class scope = models.fields.CharField(max_length=79, null=False) detail = models.JSONField(encoder=ActivityDetailEncoder, null=True) - created_at: models.DateTimeField = models.DateTimeField(default=timezone.now) + created_at = models.DateTimeField(default=timezone.now) common_field_exclusions = [ diff --git a/posthog/models/activity_logging/notification_viewed.py b/posthog/models/activity_logging/notification_viewed.py index cdecb69c1dbca..804e38de0f379 100644 --- a/posthog/models/activity_logging/notification_viewed.py +++ b/posthog/models/activity_logging/notification_viewed.py @@ -7,7 +7,7 @@ class NotificationViewed(UUIDModel): user = models.ForeignKey("posthog.User", null=True, on_delete=models.SET_NULL) # when viewing notifications made by viewing the activity log we count unread notifications # as any after the last viewed date - last_viewed_activity_date: models.DateTimeField = models.DateTimeField(default=None) + last_viewed_activity_date = models.DateTimeField(default=None) class Meta: constraints = [models.UniqueConstraint(fields=["user"], name="posthog_user_unique_viewed_date")] diff --git a/posthog/models/alert.py b/posthog/models/alert.py index 8aa62cf977b7a..807bc0a0437aa 100644 --- a/posthog/models/alert.py +++ b/posthog/models/alert.py @@ -1,20 +1,213 @@ +from datetime import datetime, UTC, timedelta +from typing import Any, Optional + from django.db import models +from django.core.exceptions import ValidationError + +from posthog.hogql_queries.legacy_compatibility.flagged_conversion_manager import conversion_to_query_based from posthog.models.insight import Insight +from posthog.models.utils import UUIDModel, CreatedMetaFields +from posthog.schema import AlertCondition, InsightThreshold def are_alerts_supported_for_insight(insight: Insight) -> bool: - query = insight.query - if query is None or query.get("kind") != "TrendsQuery": - return False - if query.get("trendsFilter", {}).get("display") != "BoldNumber": - return False + with conversion_to_query_based(insight): + query = insight.query + while query.get("source"): + query = query["source"] + if query is None or query.get("kind") != "TrendsQuery": + return False + if query.get("trendsFilter", {}).get("display") != "BoldNumber": + return False return True +class ConditionValidator: + def __init__(self, threshold: Optional[InsightThreshold], condition: AlertCondition): + self.threshold = threshold + self.condition = condition + + def validate(self, calculated_value: float) -> list[str]: + validators: Any = [ + self.validate_absolute_threshold, + ] + matches = [] + for validator in validators: + matches += validator(calculated_value) + return matches + + def validate_absolute_threshold(self, calculated_value: float) -> list[str]: + if not self.threshold or not self.threshold.absoluteThreshold: + return [] + + absolute_threshold = self.threshold.absoluteThreshold + if absolute_threshold.lower is not None and calculated_value < absolute_threshold.lower: + return [f"The trend value ({calculated_value}) is below the lower threshold ({absolute_threshold.lower})"] + if absolute_threshold.upper is not None and calculated_value > absolute_threshold.upper: + return [f"The trend value ({calculated_value}) is above the upper threshold ({absolute_threshold.upper})"] + return [] + + class Alert(models.Model): - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - insight: models.ForeignKey = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE) 
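A minimal sketch of how the absolute-threshold check introduced in the alert.py hunk above behaves, assuming the `InsightThreshold` and `AlertCondition` pydantic models from `posthog.schema` accept plain dicts via `model_validate` (as `evaluate_condition` does further down in this hunk); the payload values are illustrative only:

from posthog.models.alert import ConditionValidator
from posthog.schema import AlertCondition, InsightThreshold

threshold = InsightThreshold.model_validate({"absoluteThreshold": {"lower": 10, "upper": 100}})
condition = AlertCondition.model_validate({})
validator = ConditionValidator(threshold=threshold, condition=condition)

validator.validate(5.0)    # one breach message: the value is below the lower bound
validator.validate(50.0)   # [] - within bounds, nothing to report
validator.validate(500.0)  # one breach message: the value is above the upper bound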
+ """ + @deprecated("AlertConfiguration should be used instead.") + """ + + team = models.ForeignKey("Team", on_delete=models.CASCADE) + insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE) + + name = models.CharField(max_length=100) + target_value = models.TextField() + anomaly_condition = models.JSONField(default=dict) + + +class Threshold(CreatedMetaFields, UUIDModel): + """ + Threshold holds the configuration for a threshold. This can either be attached to an alert, or used as a standalone + object for other purposes. + """ + + team = models.ForeignKey("Team", on_delete=models.CASCADE) + insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE) + + name = models.CharField(max_length=255, blank=True) + configuration = models.JSONField(default=dict) + + def clean(self): + config = InsightThreshold.model_validate(self.configuration) + if not config or not config.absoluteThreshold: + return + if config.absoluteThreshold.lower is not None and config.absoluteThreshold.upper is not None: + if config.absoluteThreshold.lower > config.absoluteThreshold.upper: + raise ValidationError("Lower threshold must be less than upper threshold") + + +class AlertConfiguration(CreatedMetaFields, UUIDModel): + ALERTS_PER_TEAM = 10 + + team = models.ForeignKey("Team", on_delete=models.CASCADE) + insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE) + + name = models.CharField(max_length=255, blank=True) + subscribed_users = models.ManyToManyField( + "posthog.User", + through="posthog.AlertSubscription", + through_fields=("alert_configuration", "user"), + related_name="alert_configurations", + ) + + # The threshold to evaluate the alert against. If null, the alert must have other conditions to trigger. + threshold = models.ForeignKey(Threshold, on_delete=models.CASCADE, null=True, blank=True) + condition = models.JSONField(default=dict) + + STATE_CHOICES = [ + ("firing", "Firing"), + ("inactive", "Inactive"), + ] + state = models.CharField(max_length=10, choices=STATE_CHOICES, default="inactive") + enabled = models.BooleanField(default=True) + + last_notified_at = models.DateTimeField(null=True, blank=True) + + def __str__(self): + return f"{self.name} (Team: {self.team})" + + def save(self, *args, **kwargs): + if not self.enabled: + # When disabling an alert, set the state to inactive + self.state = "inactive" + if "update_fields" in kwargs: + kwargs["update_fields"].append("state") + + super().save(*args, **kwargs) + + def evaluate_condition(self, calculated_value) -> list[str]: + threshold = InsightThreshold.model_validate(self.threshold.configuration) if self.threshold else None + condition = AlertCondition.model_validate(self.condition) + validator = ConditionValidator(threshold=threshold, condition=condition) + return validator.validate(calculated_value) + + def add_check( + self, *, calculated_value: Optional[float], error: Optional[dict] = None + ) -> tuple["AlertCheck", list[str]]: + """Add a new AlertCheck, managing state transitions and cooldown.""" + matches = self.evaluate_condition(calculated_value) if calculated_value is not None else [] + targets_notified = {} + + # Determine the appropriate state for this check + if matches: + if self.state != "firing": + # Transition to firing state and send a notification + check_state = "firing" + self.last_notified_at = datetime.now(UTC) + targets_notified = {"users": list(self.subscribed_users.all().values_list("email", flat=True))} + else: + check_state = "firing" # Already firing, no new notification 
+ matches = [] # Don't send duplicate notifications + else: + check_state = "not_met" + self.state = "inactive" # Set the Alert to inactive if the threshold is no longer met + # Optionally send a resolved notification + + alert_check = AlertCheck.objects.create( + alert_configuration=self, + calculated_value=calculated_value, + condition=self.condition, + targets_notified=targets_notified, + state=check_state, + error=error, + ) + + # Update the Alert state + if check_state == "firing": + self.state = "firing" + elif check_state == "not_met": + self.state = "inactive" + + self.save() + return alert_check, matches + + +class AlertSubscription(CreatedMetaFields, UUIDModel): + user = models.ForeignKey( + "User", + on_delete=models.CASCADE, + limit_choices_to={ + "is_active": True, + "organization_id": models.OuterRef("alert_configuration__team__organization_id"), + }, + related_name="alert_subscriptions", + ) + alert_configuration = models.ForeignKey(AlertConfiguration, on_delete=models.CASCADE) + subscribed = models.BooleanField(default=True) + + def __str__(self): + return f"AlertSubscription for {self.alert_configuration.name} by {self.user.email}" + + class Meta: + unique_together = ["user", "alert_configuration"] + + +class AlertCheck(UUIDModel): + alert_configuration = models.ForeignKey(AlertConfiguration, on_delete=models.CASCADE) + created_at = models.DateTimeField(auto_now_add=True) + calculated_value = models.FloatField(null=True, blank=True) + condition = models.JSONField(default=dict) # Snapshot of the condition at the time of the check + targets_notified = models.JSONField(default=dict) + error = models.JSONField(null=True, blank=True) + + STATE_CHOICES = [ + ("firing", "Firing"), + ("not_met", "Not Met"), + ] + state = models.CharField(max_length=10, choices=STATE_CHOICES, default="not_met") + + def __str__(self): + return f"AlertCheck for {self.alert_configuration.name} at {self.created_at}" - name: models.CharField = models.CharField(max_length=100) - target_value: models.TextField = models.TextField() - anomaly_condition: models.JSONField = models.JSONField(default=dict) + @classmethod + def clean_up_old_checks(cls) -> int: + retention_days = 14 + oldest_allowed_date = datetime.now(UTC) - timedelta(days=retention_days) + rows_count, _ = cls.objects.filter(created_at__lt=oldest_allowed_date).delete() + return rows_count diff --git a/posthog/models/annotation.py b/posthog/models/annotation.py index e99274f242c9e..c7301fba040f0 100644 --- a/posthog/models/annotation.py +++ b/posthog/models/annotation.py @@ -15,25 +15,21 @@ class CreationType(models.TextChoices): USER = "USR", "user" GITHUB = "GIT", "GitHub" - content: models.CharField = models.CharField(max_length=400, null=True, blank=True) - created_at: models.DateTimeField = models.DateTimeField(default=timezone.now, null=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - dashboard_item: models.ForeignKey = models.ForeignKey( - "posthog.Insight", on_delete=models.SET_NULL, null=True, blank=True - ) - dashboard: models.ForeignKey = models.ForeignKey( - "posthog.Dashboard", on_delete=models.SET_NULL, null=True, blank=True - ) - team: models.ForeignKey = models.ForeignKey("posthog.Team", on_delete=models.CASCADE) - organization: models.ForeignKey = models.ForeignKey("posthog.Organization", on_delete=models.CASCADE, null=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + content = models.CharField(max_length=400, null=True, 
blank=True) + created_at = models.DateTimeField(default=timezone.now, null=True) + updated_at = models.DateTimeField(auto_now=True) + dashboard_item = models.ForeignKey("posthog.Insight", on_delete=models.SET_NULL, null=True, blank=True) + dashboard = models.ForeignKey("posthog.Dashboard", on_delete=models.SET_NULL, null=True, blank=True) + team = models.ForeignKey("posthog.Team", on_delete=models.CASCADE) + organization = models.ForeignKey("posthog.Organization", on_delete=models.CASCADE, null=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) scope = models.CharField(max_length=24, choices=Scope.choices, default=Scope.INSIGHT) creation_type = models.CharField(max_length=3, choices=CreationType.choices, default=CreationType.USER) - date_marker: models.DateTimeField = models.DateTimeField(null=True, blank=True) - deleted: models.BooleanField = models.BooleanField(default=False) + date_marker = models.DateTimeField(null=True, blank=True) + deleted = models.BooleanField(default=False) # DEPRECATED: replaced by scope - apply_all: models.BooleanField = models.BooleanField(null=True) + apply_all = models.BooleanField(null=True) @property def insight_short_id(self) -> Optional[str]: diff --git a/posthog/models/async_deletion/async_deletion.py b/posthog/models/async_deletion/async_deletion.py index a851fa513f526..e89cf5f1be073 100644 --- a/posthog/models/async_deletion/async_deletion.py +++ b/posthog/models/async_deletion/async_deletion.py @@ -11,6 +11,25 @@ class DeletionType(models.IntegerChoices): # This model represents deletions that should delete (other, unrelated) data async class AsyncDeletion(models.Model): + id = models.BigAutoField(primary_key=True) + # Should be one of the DeletionType enum + deletion_type = models.PositiveSmallIntegerField(null=False, blank=False, choices=DeletionType.choices) + + # Team whose data shall be deleted. This is not a foreign key, because we still need this value + # when the team is gone (we are talking about _async_ deletions after all) + team_id = models.IntegerField() + + # id for team (same as team_id column), uuid for person, key for group + key = models.CharField(max_length=400, null=False, blank=False) + # Only populated for group deletions + group_type_index = models.IntegerField(null=True, blank=False) + + created_by = models.ForeignKey("User", null=True, on_delete=models.SET_NULL) + created_at = models.DateTimeField(auto_now_add=True) + + # When was the data verified to be deleted - we can skip it in the next round + delete_verified_at = models.DateTimeField(null=True, blank=True) + class Meta: constraints = [ # :TRICKY: Postgres does not handle UNIQUE and NULL together well, so create 2 indexes. @@ -26,24 +45,3 @@ class Meta: ), ] indexes = [models.Index(name="delete_verified_at index", fields=["delete_verified_at"])] - - id: models.BigAutoField = models.BigAutoField(primary_key=True) - # Should be one of the DeletionType enum - deletion_type: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( - null=False, blank=False, choices=DeletionType.choices - ) - - # Team whose data shall be deleted. 
This is not a foreign key, because we still need this value - # when the team is gone (we are talking about _async_ deletions after all) - team_id: models.IntegerField = models.IntegerField() - - # id for team (same as team_id column), uuid for person, key for group - key: models.CharField = models.CharField(max_length=400, null=False, blank=False) - # Only populated for group deletions - group_type_index: models.IntegerField = models.IntegerField(null=True, blank=False) - - created_by: models.ForeignKey = models.ForeignKey("User", null=True, on_delete=models.SET_NULL) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - - # When was the data verified to be deleted - we can skip it in the next round - delete_verified_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) diff --git a/posthog/models/async_migration.py b/posthog/models/async_migration.py index 92d61fb5e3f33..4d44a3149fa44 100644 --- a/posthog/models/async_migration.py +++ b/posthog/models/async_migration.py @@ -13,39 +13,35 @@ class MigrationStatus: class AsyncMigrationError(models.Model): - id: models.BigAutoField = models.BigAutoField(primary_key=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - description: models.TextField = models.TextField(null=False, blank=False) - async_migration: models.ForeignKey = models.ForeignKey("AsyncMigration", on_delete=models.CASCADE) + id = models.BigAutoField(primary_key=True) + created_at = models.DateTimeField(auto_now_add=True, blank=True) + description = models.TextField(null=False, blank=False) + async_migration = models.ForeignKey("AsyncMigration", on_delete=models.CASCADE) class AsyncMigration(models.Model): - class Meta: - constraints = [models.UniqueConstraint(fields=["name"], name="unique name")] - - id: models.BigAutoField = models.BigAutoField(primary_key=True) - name: models.CharField = models.CharField(max_length=50, null=False, blank=False) - description: models.CharField = models.CharField(max_length=400, null=True, blank=True) - progress: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(null=False, blank=False, default=0) - status: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( - null=False, blank=False, default=MigrationStatus.NotStarted - ) + id = models.BigAutoField(primary_key=True) + name = models.CharField(max_length=50, null=False, blank=False) + description = models.CharField(max_length=400, null=True, blank=True) + progress = models.PositiveSmallIntegerField(null=False, blank=False, default=0) + status = models.PositiveSmallIntegerField(null=False, blank=False, default=MigrationStatus.NotStarted) - current_operation_index: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( - null=False, blank=False, default=0 - ) - current_query_id: models.CharField = models.CharField(max_length=100, null=False, blank=False, default="") - celery_task_id: models.CharField = models.CharField(max_length=100, null=False, blank=False, default="") + current_operation_index = models.PositiveSmallIntegerField(null=False, blank=False, default=0) + current_query_id = models.CharField(max_length=100, null=False, blank=False, default="") + celery_task_id = models.CharField(max_length=100, null=False, blank=False, default="") - started_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) + started_at = models.DateTimeField(null=True, blank=True) # Can finish with status 'CompletedSuccessfully', 'Errored', or 'RolledBack' - 
finished_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) + finished_at = models.DateTimeField(null=True, blank=True) - posthog_min_version: models.CharField = models.CharField(max_length=20, null=True, blank=True) - posthog_max_version: models.CharField = models.CharField(max_length=20, null=True, blank=True) + posthog_min_version = models.CharField(max_length=20, null=True, blank=True) + posthog_max_version = models.CharField(max_length=20, null=True, blank=True) - parameters: models.JSONField = models.JSONField(default=dict) + parameters = models.JSONField(default=dict) + + class Meta: + constraints = [models.UniqueConstraint(fields=["name"], name="unique name")] def get_name_with_requirements(self) -> str: return ( diff --git a/posthog/models/cohort/cohort.py b/posthog/models/cohort/cohort.py index 832178d6f4c7a..525f21d01bebf 100644 --- a/posthog/models/cohort/cohort.py +++ b/posthog/models/cohort/cohort.py @@ -76,30 +76,30 @@ def create(self, *args: Any, **kwargs: Any): class Cohort(models.Model): - name: models.CharField = models.CharField(max_length=400, null=True, blank=True) - description: models.CharField = models.CharField(max_length=1000, blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - deleted: models.BooleanField = models.BooleanField(default=False) - filters: models.JSONField = models.JSONField(null=True, blank=True) - query: models.JSONField = models.JSONField(null=True, blank=True) - people: models.ManyToManyField = models.ManyToManyField("Person", through="CohortPeople") - version: models.IntegerField = models.IntegerField(blank=True, null=True) - pending_version: models.IntegerField = models.IntegerField(blank=True, null=True) - count: models.IntegerField = models.IntegerField(blank=True, null=True) - - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, blank=True, null=True) - created_at: models.DateTimeField = models.DateTimeField(default=timezone.now, blank=True, null=True) - - is_calculating: models.BooleanField = models.BooleanField(default=False) - last_calculation: models.DateTimeField = models.DateTimeField(blank=True, null=True) - errors_calculating: models.IntegerField = models.IntegerField(default=0) - - is_static: models.BooleanField = models.BooleanField(default=False) - - objects = CohortManager() + name = models.CharField(max_length=400, null=True, blank=True) + description = models.CharField(max_length=1000, blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + deleted = models.BooleanField(default=False) + filters = models.JSONField(null=True, blank=True) + query = models.JSONField(null=True, blank=True) + people = models.ManyToManyField("Person", through="CohortPeople") + version = models.IntegerField(blank=True, null=True) + pending_version = models.IntegerField(blank=True, null=True) + count = models.IntegerField(blank=True, null=True) + + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, blank=True, null=True) + created_at = models.DateTimeField(default=timezone.now, blank=True, null=True) + + is_calculating = models.BooleanField(default=False) + last_calculation = models.DateTimeField(blank=True, null=True) + errors_calculating = models.IntegerField(default=0) + + is_static = models.BooleanField(default=False) # deprecated in favor of filters - groups: models.JSONField = models.JSONField(default=list) + groups = models.JSONField(default=list) + + objects = CohortManager() def __str__(self): return self.name @@ 
-368,10 +368,10 @@ def get_and_update_pending_version(cohort: Cohort): class CohortPeople(models.Model): - id: models.BigAutoField = models.BigAutoField(primary_key=True) - cohort: models.ForeignKey = models.ForeignKey("Cohort", on_delete=models.CASCADE) - person: models.ForeignKey = models.ForeignKey("Person", on_delete=models.CASCADE) - version: models.IntegerField = models.IntegerField(blank=True, null=True) + id = models.BigAutoField(primary_key=True) + cohort = models.ForeignKey("Cohort", on_delete=models.CASCADE) + person = models.ForeignKey("Person", on_delete=models.CASCADE) + version = models.IntegerField(blank=True, null=True) class Meta: indexes = [models.Index(fields=["cohort_id", "person_id"])] diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py index eec8a86ddc316..98f04b2a16808 100644 --- a/posthog/models/cohort/util.py +++ b/posthog/models/cohort/util.py @@ -311,7 +311,7 @@ def recalculate_cohortpeople( recalcluate_cohortpeople_sql = RECALCULATE_COHORT_BY_ID.format(cohort_filter=cohort_query) - tag_queries(kind="cohort_calculation", team_id=cohort.team_id) + tag_queries(kind="cohort_calculation", team_id=cohort.team_id, query_type="CohortsQuery") if initiating_user_id: tag_queries(user_id=initiating_user_id) diff --git a/posthog/models/comment.py b/posthog/models/comment.py index a1fff705c0632..11c8128ef5811 100644 --- a/posthog/models/comment.py +++ b/posthog/models/comment.py @@ -9,12 +9,12 @@ class Comment(UUIDModel): - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - content: models.TextField = models.TextField(blank=True, null=True) - version: models.IntegerField = models.IntegerField(default=0) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) - deleted: models.BooleanField = models.BooleanField(null=True, blank=True, default=False) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + content = models.TextField(blank=True, null=True) + version = models.IntegerField(default=0) + created_at = models.DateTimeField(auto_now_add=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + deleted = models.BooleanField(null=True, blank=True, default=False) # Loose relationship modelling to other PostHog resources item_id = models.CharField(max_length=72, null=True) @@ -22,7 +22,7 @@ class Comment(UUIDModel): scope = models.CharField(max_length=79, null=False) # Threads/replies are simply comments with a source_comment_id - source_comment: models.ForeignKey = models.ForeignKey("Comment", on_delete=models.CASCADE, null=True, blank=True) + source_comment = models.ForeignKey("Comment", on_delete=models.CASCADE, null=True, blank=True) class Meta: indexes = [models.Index(fields=["team_id", "scope", "item_id"])] diff --git a/posthog/models/dashboard.py b/posthog/models/dashboard.py index f4fe641c004e2..b765c6154f815 100644 --- a/posthog/models/dashboard.py +++ b/posthog/models/dashboard.py @@ -40,17 +40,17 @@ class PrivilegeLevel(models.IntegerChoices): CAN_VIEW = 21, "Can view dashboard" CAN_EDIT = 37, "Can edit dashboard" - name: models.CharField = models.CharField(max_length=400, null=True, blank=True) - description: models.TextField = models.TextField(blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - pinned: models.BooleanField = models.BooleanField(default=False) - 
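Because threads are modelled as comments that point at a parent via `source_comment` (per the comment in the comment.py hunk above), a reply is simply a second row; a rough sketch, assuming `team` is an existing Team and with the scope, item id and text invented for illustration:

from posthog.models.comment import Comment

parent = Comment.objects.create(team=team, scope="Insight", item_id="insight-short-id", content="First!")
reply = Comment.objects.create(
    team=team,
    scope="Insight",
    item_id="insight-short-id",
    content="Replying in the thread",
    source_comment=parent,  # this link is what makes it a reply rather than a top-level comment
)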
created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) - deleted: models.BooleanField = models.BooleanField(default=False) - last_accessed_at: models.DateTimeField = models.DateTimeField(blank=True, null=True) - filters: models.JSONField = models.JSONField(default=dict) - creation_mode: models.CharField = models.CharField(max_length=16, default="default", choices=CreationMode.choices) - restriction_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( + name = models.CharField(max_length=400, null=True, blank=True) + description = models.TextField(blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + pinned = models.BooleanField(default=False) + created_at = models.DateTimeField(auto_now_add=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + deleted = models.BooleanField(default=False) + last_accessed_at = models.DateTimeField(blank=True, null=True) + filters = models.JSONField(default=dict) + creation_mode = models.CharField(max_length=16, default="default", choices=CreationMode.choices) + restriction_level = models.PositiveSmallIntegerField( default=RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT, choices=RestrictionLevel.choices, ) @@ -72,9 +72,9 @@ class PrivilegeLevel(models.IntegerChoices): ) # DEPRECATED: using the new "sharing" relation instead - share_token: models.CharField = models.CharField(max_length=400, null=True, blank=True) + share_token = models.CharField(max_length=400, null=True, blank=True) # DEPRECATED: using the new "is_sharing_enabled" relation instead - is_shared: models.BooleanField = models.BooleanField(default=False) + is_shared = models.BooleanField(default=False) objects = DashboardManager() objects_including_soft_deleted: models.Manager["Dashboard"] = models.Manager() diff --git a/posthog/models/dashboard_templates.py b/posthog/models/dashboard_templates.py index 3d2c4a7678597..bf9d6dc733eb2 100644 --- a/posthog/models/dashboard_templates.py +++ b/posthog/models/dashboard_templates.py @@ -21,22 +21,22 @@ class Scope(models.TextChoices): GLOBAL = "global", "Global" FEATURE_FLAG = "feature_flag", "Feature Flag" - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, null=True) - template_name: models.CharField = models.CharField(max_length=400, null=True, blank=True) - dashboard_description: models.CharField = models.CharField(max_length=400, null=True, blank=True) - dashboard_filters: models.JSONField = models.JSONField(null=True, blank=True) - tiles: models.JSONField = models.JSONField(blank=True, null=True) - variables: models.JSONField = models.JSONField(null=True, blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE, null=True) + template_name = models.CharField(max_length=400, null=True, blank=True) + dashboard_description = models.CharField(max_length=400, null=True, blank=True) + dashboard_filters = models.JSONField(null=True, blank=True) + tiles = models.JSONField(blank=True, null=True) + variables = models.JSONField(null=True, blank=True) tags: ArrayField = ArrayField(models.CharField(max_length=255), blank=True, null=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True, null=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) - deleted: models.BooleanField = 
models.BooleanField(blank=True, null=True) - image_url: models.CharField = models.CharField(max_length=8201, null=True, blank=True) - scope: models.CharField = models.CharField(max_length=24, choices=Scope.choices, null=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + deleted = models.BooleanField(blank=True, null=True) + image_url = models.CharField(max_length=8201, null=True, blank=True) + scope = models.CharField(max_length=24, choices=Scope.choices, null=True, blank=True) # URL length for browsers can be as much as 64Kb # see https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers # but GitHub apparently is more likely 8kb https://stackoverflow.com/a/64565317 - github_url: models.CharField = models.CharField(max_length=8201, null=True) + github_url = models.CharField(max_length=8201, null=True) class Meta: constraints = [ diff --git a/posthog/models/dashboard_tile.py b/posthog/models/dashboard_tile.py index b24fdf1989bb6..c5e0134409d55 100644 --- a/posthog/models/dashboard_tile.py +++ b/posthog/models/dashboard_tile.py @@ -9,11 +9,11 @@ class Text(models.Model): - body: models.CharField = models.CharField(max_length=4000, null=True, blank=True) + body = models.CharField(max_length=4000, null=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) - last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now) - last_modified_by: models.ForeignKey = models.ForeignKey( + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + last_modified_at = models.DateTimeField(default=timezone.now) + last_modified_by = models.ForeignKey( "User", on_delete=models.SET_NULL, null=True, @@ -21,7 +21,7 @@ class Text(models.Model): related_name="modified_text_tiles", ) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) + team = models.ForeignKey("Team", on_delete=models.CASCADE) class DashboardTileManager(models.Manager): @@ -46,16 +46,16 @@ class DashboardTile(models.Model): ) # Tile layout and style - layouts: models.JSONField = models.JSONField(default=dict) - color: models.CharField = models.CharField(max_length=400, null=True, blank=True) + layouts = models.JSONField(default=dict) + color = models.CharField(max_length=400, null=True, blank=True) # caching for this dashboard & insight filter combination - filters_hash: models.CharField = models.CharField(max_length=400, null=True, blank=True) - last_refresh: models.DateTimeField = models.DateTimeField(blank=True, null=True) - refreshing: models.BooleanField = models.BooleanField(null=True) - refresh_attempt: models.IntegerField = models.IntegerField(null=True, blank=True) + filters_hash = models.CharField(max_length=400, null=True, blank=True) + last_refresh = models.DateTimeField(blank=True, null=True) + refreshing = models.BooleanField(null=True) + refresh_attempt = models.IntegerField(null=True, blank=True) - deleted: models.BooleanField = models.BooleanField(null=True, blank=True) + deleted = models.BooleanField(null=True, blank=True) objects = DashboardTileManager() objects_including_soft_deleted: models.Manager["DashboardTile"] = models.Manager() diff --git a/posthog/models/early_access_feature.py b/posthog/models/early_access_feature.py index 3ec1c99543b9a..453e184c7e18b 100644 --- 
a/posthog/models/early_access_feature.py +++ b/posthog/models/early_access_feature.py @@ -11,13 +11,13 @@ class Stage(models.TextChoices): GENERAL_AVAILABILITY = "general-availability", "general availability" ARCHIVED = "archived", "archived" - team: models.ForeignKey = models.ForeignKey( + team = models.ForeignKey( "posthog.Team", on_delete=models.CASCADE, related_name="features", related_query_name="feature", ) - feature_flag: models.ForeignKey = models.ForeignKey( + feature_flag = models.ForeignKey( "posthog.FeatureFlag", null=True, blank=True, @@ -25,11 +25,11 @@ class Stage(models.TextChoices): related_name="features", related_query_name="feature", ) - name: models.CharField = models.CharField(max_length=200) - description: models.TextField = models.TextField(blank=True) - stage: models.CharField = models.CharField(max_length=40, choices=Stage.choices) - documentation_url: models.URLField = models.URLField(max_length=800, blank=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + name = models.CharField(max_length=200) + description = models.TextField(blank=True) + stage = models.CharField(max_length=40, choices=Stage.choices) + documentation_url = models.URLField(max_length=800, blank=True) + created_at = models.DateTimeField(auto_now_add=True) def __str__(self) -> str: return self.name diff --git a/posthog/models/element/element.py b/posthog/models/element/element.py index 4beeb5400851b..4727a7f9592ae 100644 --- a/posthog/models/element/element.py +++ b/posthog/models/element/element.py @@ -6,17 +6,17 @@ class Element(models.Model): USEFUL_ELEMENTS = ["a", "button", "input", "select", "textarea", "label"] - text: models.CharField = models.CharField(max_length=10_000, null=True, blank=True) - tag_name: models.CharField = models.CharField(max_length=1_000, null=True, blank=True) - href: models.CharField = models.CharField(max_length=10_000, null=True, blank=True) - attr_id: models.CharField = models.CharField(max_length=10_000, null=True, blank=True) + text = models.CharField(max_length=10_000, null=True, blank=True) + tag_name = models.CharField(max_length=1_000, null=True, blank=True) + href = models.CharField(max_length=10_000, null=True, blank=True) + attr_id = models.CharField(max_length=10_000, null=True, blank=True) attr_class = ArrayField(models.CharField(max_length=200, blank=True), null=True, blank=True) - nth_child: models.IntegerField = models.IntegerField(null=True, blank=True) - nth_of_type: models.IntegerField = models.IntegerField(null=True, blank=True) - attributes: models.JSONField = models.JSONField(default=dict) - event: models.ForeignKey = models.ForeignKey("Event", on_delete=models.CASCADE, null=True, blank=True) - order: models.IntegerField = models.IntegerField(null=True, blank=True) - group: models.ForeignKey = models.ForeignKey("ElementGroup", on_delete=models.CASCADE, null=True, blank=True) + nth_child = models.IntegerField(null=True, blank=True) + nth_of_type = models.IntegerField(null=True, blank=True) + attributes = models.JSONField(default=dict) + event = models.ForeignKey("Event", on_delete=models.CASCADE, null=True, blank=True) + order = models.IntegerField(null=True, blank=True) + group = models.ForeignKey("ElementGroup", on_delete=models.CASCADE, null=True, blank=True) parse_attributes_regex = re.compile(r"(?P(?P.*?)\=\"(?P.*?[^\\])\")", re.MULTILINE) diff --git a/posthog/models/element_group.py b/posthog/models/element_group.py index c4571180c4ecb..50de954ed1a9e 100644 --- a/posthog/models/element_group.py +++ 
b/posthog/models/element_group.py @@ -41,8 +41,8 @@ def create(self, *args: Any, **kwargs: Any): class ElementGroup(models.Model): - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - hash: models.CharField = models.CharField(max_length=400, null=True, blank=True) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + hash = models.CharField(max_length=400, null=True, blank=True) objects = ElementGroupManager() diff --git a/posthog/models/error_tracking/error_tracking.py b/posthog/models/error_tracking/error_tracking.py index ee7c0bfe3cb1e..1007e12699863 100644 --- a/posthog/models/error_tracking/error_tracking.py +++ b/posthog/models/error_tracking/error_tracking.py @@ -12,8 +12,8 @@ class Status(models.TextChoices): RESOLVED = "resolved", "Resolved" PENDING_RELEASE = "pending_release", "Pending release" - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + created_at = models.DateTimeField(auto_now_add=True, blank=True) fingerprint: ArrayField = ArrayField(models.TextField(null=False, blank=False), null=False, blank=False) merged_fingerprints: ArrayField = ArrayField( ArrayField(models.TextField(null=False, blank=False), null=False, blank=False), @@ -21,10 +21,8 @@ class Status(models.TextChoices): blank=False, default=list, ) - status: models.CharField = models.CharField( - max_length=40, choices=Status.choices, default=Status.ACTIVE, null=False - ) - assignee: models.ForeignKey = models.ForeignKey( + status = models.CharField(max_length=40, choices=Status.choices, default=Status.ACTIVE, null=False) + assignee = models.ForeignKey( "User", on_delete=models.SET_NULL, null=True, diff --git a/posthog/models/event/event.py b/posthog/models/event/event.py index 184fffb18afa6..64448b8073d6c 100644 --- a/posthog/models/event/event.py +++ b/posthog/models/event/event.py @@ -121,6 +121,18 @@ def _split(self, selector): class Event(models.Model): + created_at = models.DateTimeField(auto_now_add=True, null=True, blank=True) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + event = models.CharField(max_length=200, null=True, blank=True) + distinct_id = models.CharField(max_length=200) + properties = models.JSONField(default=dict) + timestamp = models.DateTimeField(default=timezone.now, blank=True) + elements_hash = models.CharField(max_length=200, null=True, blank=True) + site_url = models.CharField(max_length=200, null=True, blank=True) + + # DEPRECATED: elements are stored against element groups now + elements = models.JSONField(default=list, null=True, blank=True) + class Meta: indexes = [ models.Index(fields=["elements_hash"]), @@ -132,15 +144,3 @@ class Meta: # adding this here to improve visibility. 
# models.Index(fields=["distinct_id"], name="idx_distinct_id"), ] - - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, null=True, blank=True) - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - event: models.CharField = models.CharField(max_length=200, null=True, blank=True) - distinct_id: models.CharField = models.CharField(max_length=200) - properties: models.JSONField = models.JSONField(default=dict) - timestamp: models.DateTimeField = models.DateTimeField(default=timezone.now, blank=True) - elements_hash: models.CharField = models.CharField(max_length=200, null=True, blank=True) - site_url: models.CharField = models.CharField(max_length=200, null=True, blank=True) - - # DEPRECATED: elements are stored against element groups now - elements: models.JSONField = models.JSONField(default=list, null=True, blank=True) diff --git a/posthog/models/event_buffer.py b/posthog/models/event_buffer.py index f8cfed80775cd..b52d957a3f396 100644 --- a/posthog/models/event_buffer.py +++ b/posthog/models/event_buffer.py @@ -2,6 +2,6 @@ class EventBuffer(models.Model): - event: models.JSONField = models.JSONField(null=True, blank=True) - process_at: models.DateTimeField = models.DateTimeField() - locked: models.BooleanField = models.BooleanField() + event = models.JSONField(null=True, blank=True) + process_at = models.DateTimeField() + locked = models.BooleanField() diff --git a/posthog/models/event_definition.py b/posthog/models/event_definition.py index 5b22a9e6a2869..3f5c8d8a6f45b 100644 --- a/posthog/models/event_definition.py +++ b/posthog/models/event_definition.py @@ -7,23 +7,23 @@ class EventDefinition(UUIDModel): - team: models.ForeignKey = models.ForeignKey( + team = models.ForeignKey( Team, on_delete=models.CASCADE, related_name="event_definitions", related_query_name="team", ) - name: models.CharField = models.CharField(max_length=400) - created_at: models.DateTimeField = models.DateTimeField(default=timezone.now, null=True) - last_seen_at: models.DateTimeField = models.DateTimeField(default=None, null=True) + name = models.CharField(max_length=400) + created_at = models.DateTimeField(default=timezone.now, null=True) + last_seen_at = models.DateTimeField(default=None, null=True) # DEPRECATED # Number of times the event has been used in a query in the last 30 rolling days (computed asynchronously every other blue moon) - query_usage_30_day: models.IntegerField = models.IntegerField(default=None, null=True) + query_usage_30_day = models.IntegerField(default=None, null=True) # DEPRECATED # Volume of events in the last 30 rolling days (computed asynchronously) - volume_30_day: models.IntegerField = models.IntegerField(default=None, null=True) + volume_30_day = models.IntegerField(default=None, null=True) class Meta: unique_together = ("team", "name") diff --git a/posthog/models/event_property.py b/posthog/models/event_property.py index 4824248ddfce2..a5087c5b3f667 100644 --- a/posthog/models/event_property.py +++ b/posthog/models/event_property.py @@ -5,9 +5,9 @@ class EventProperty(models.Model): - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - event: models.CharField = models.CharField(max_length=400, null=False) - property: models.CharField = models.CharField(max_length=400, null=False) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + event = models.CharField(max_length=400, null=False) + property = models.CharField(max_length=400, null=False) class Meta: constraints = [ diff --git 
a/posthog/models/experiment.py b/posthog/models/experiment.py index d20e59a96b3d0..9534c2a28914f 100644 --- a/posthog/models/experiment.py +++ b/posthog/models/experiment.py @@ -3,12 +3,12 @@ class Experiment(models.Model): - name: models.CharField = models.CharField(max_length=400) - description: models.CharField = models.CharField(max_length=400, null=True, blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) + name = models.CharField(max_length=400) + description = models.CharField(max_length=400, null=True, blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) # Filters define the target metric of an Experiment - filters: models.JSONField = models.JSONField(default=dict) + filters = models.JSONField(default=dict) # Parameters include configuration fields for the experiment: What the control & test variant are called, # and any test significance calculation parameters @@ -18,19 +18,19 @@ class Experiment(models.Model): # recommended_sample_size: number # feature_flag_variants: { key: string, name: string, rollout_percentage: number }[] # custom_exposure_filter: Filter json - parameters: models.JSONField = models.JSONField(default=dict, null=True) + parameters = models.JSONField(default=dict, null=True) # A list of filters for secondary metrics - secondary_metrics: models.JSONField = models.JSONField(default=list, null=True) - - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True) - feature_flag: models.ForeignKey = models.ForeignKey("FeatureFlag", blank=False, on_delete=models.RESTRICT) - exposure_cohort: models.ForeignKey = models.ForeignKey("Cohort", on_delete=models.SET_NULL, null=True) - start_date: models.DateTimeField = models.DateTimeField(null=True) - end_date: models.DateTimeField = models.DateTimeField(null=True) - created_at: models.DateTimeField = models.DateTimeField(default=timezone.now) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - archived: models.BooleanField = models.BooleanField(default=False) + secondary_metrics = models.JSONField(default=list, null=True) + + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True) + feature_flag = models.ForeignKey("FeatureFlag", blank=False, on_delete=models.RESTRICT) + exposure_cohort = models.ForeignKey("Cohort", on_delete=models.SET_NULL, null=True) + start_date = models.DateTimeField(null=True) + end_date = models.DateTimeField(null=True) + created_at = models.DateTimeField(default=timezone.now) + updated_at = models.DateTimeField(auto_now=True) + archived = models.BooleanField(default=False) def get_feature_flag_key(self): return self.feature_flag.key diff --git a/posthog/models/exported_asset.py b/posthog/models/exported_asset.py index 7d4746234aba5..1d5e4e1c67596 100644 --- a/posthog/models/exported_asset.py +++ b/posthog/models/exported_asset.py @@ -48,30 +48,28 @@ class ExportFormat(models.TextChoices): SUPPORTED_FORMATS = [ExportFormat.PNG, ExportFormat.CSV, ExportFormat.XLSX] # Relations - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) + team = models.ForeignKey("Team", on_delete=models.CASCADE) dashboard = models.ForeignKey("posthog.Dashboard", on_delete=models.CASCADE, null=True) insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE, null=True) # Content related fields - export_format: models.CharField = models.CharField(max_length=100, choices=ExportFormat.choices) - content: models.BinaryField = 
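The `parameters` JSONField on `Experiment` is only described by the inline comment in the hunk above; read literally, a populated value might look roughly like the following, where `experiment` is an existing instance and every value is invented for illustration:

experiment.parameters = {
    "recommended_sample_size": 10_000,
    "feature_flag_variants": [
        {"key": "control", "name": "Control", "rollout_percentage": 50},
        {"key": "test", "name": "Test variant", "rollout_percentage": 50},
    ],
    # "custom_exposure_filter" would carry a Filter JSON blob when a custom exposure is configured
}
experiment.save()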
models.BinaryField(null=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) + export_format = models.CharField(max_length=100, choices=ExportFormat.choices) + content = models.BinaryField(null=True) + created_at = models.DateTimeField(auto_now_add=True, blank=True) # DateTime after the created_at after which this asset should be deleted # ExportedAssets are *not* deleted immediately after the TTL period has passed # the object manager has been altered to exclude these assets # to allow for lazy deletes - expires_after: models.DateTimeField = models.DateTimeField(null=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + expires_after = models.DateTimeField(null=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) # for example holds filters for CSV exports - export_context: models.JSONField = models.JSONField(null=True, blank=True) + export_context = models.JSONField(null=True, blank=True) # path in object storage or some other location identifier for the asset # 1000 characters would hold a 20 UUID forward slash separated path with space to spare - content_location: models.TextField = models.TextField(null=True, blank=True, max_length=1000) + content_location = models.TextField(null=True, blank=True, max_length=1000) # DEPRECATED: We now use JWT for accessing assets - access_token: models.CharField = models.CharField( - max_length=400, null=True, blank=True, default=get_default_access_token - ) + access_token = models.CharField(max_length=400, null=True, blank=True, default=get_default_access_token) # replace the default manager with one that filters out TTL deleted objects (before their deletion is processed) objects = ExportedAssetManager() diff --git a/posthog/models/feature_flag/feature_flag.py b/posthog/models/feature_flag/feature_flag.py index ff9f091957066..22a866045a6a5 100644 --- a/posthog/models/feature_flag/feature_flag.py +++ b/posthog/models/feature_flag/feature_flag.py @@ -25,39 +25,37 @@ class FeatureFlag(models.Model): - class Meta: - constraints = [models.UniqueConstraint(fields=["team", "key"], name="unique key for team")] - # When adding new fields, make sure to update organization_feature_flags.py::copy_flags - key: models.CharField = models.CharField(max_length=400) - name: models.TextField = models.TextField( + key = models.CharField(max_length=400) + name = models.TextField( blank=True ) # contains description for the FF (field name `name` is kept for backwards-compatibility) - filters: models.JSONField = models.JSONField(default=dict) - rollout_percentage: models.IntegerField = models.IntegerField(null=True, blank=True) + filters = models.JSONField(default=dict) + rollout_percentage = models.IntegerField(null=True, blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True) - created_at: models.DateTimeField = models.DateTimeField(default=timezone.now) - deleted: models.BooleanField = models.BooleanField(default=False) - active: models.BooleanField = models.BooleanField(default=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True) + created_at = models.DateTimeField(default=timezone.now) + deleted = models.BooleanField(default=False) + active = 
models.BooleanField(default=True) - rollback_conditions: models.JSONField = models.JSONField(null=True, blank=True) - performed_rollback: models.BooleanField = models.BooleanField(null=True, blank=True) + rollback_conditions = models.JSONField(null=True, blank=True) + performed_rollback = models.BooleanField(null=True, blank=True) - ensure_experience_continuity: models.BooleanField = models.BooleanField(default=False, null=True, blank=True) - usage_dashboard: models.ForeignKey = models.ForeignKey( - "Dashboard", on_delete=models.SET_NULL, null=True, blank=True - ) - analytics_dashboards: models.ManyToManyField = models.ManyToManyField( + ensure_experience_continuity = models.BooleanField(default=False, null=True, blank=True) + usage_dashboard = models.ForeignKey("Dashboard", on_delete=models.SET_NULL, null=True, blank=True) + analytics_dashboards = models.ManyToManyField( "Dashboard", through="FeatureFlagDashboards", related_name="analytics_dashboards", related_query_name="analytics_dashboard", ) # whether a feature is sending us rich analytics, like views & interactions. - has_enriched_analytics: models.BooleanField = models.BooleanField(default=False, null=True, blank=True) + has_enriched_analytics = models.BooleanField(default=False, null=True, blank=True) + + class Meta: + constraints = [models.UniqueConstraint(fields=["team", "key"], name="unique key for team")] def __str__(self): return f"{self.key} ({self.pk})" @@ -362,6 +360,15 @@ def refresh_flag_cache_on_updates(sender, instance, **kwargs): class FeatureFlagHashKeyOverride(models.Model): + # Can't use a foreign key to feature_flag_key directly, since + # the unique constraint is on (team_id+key), and not just key. + # A standard id foreign key leads to INNER JOINs every time we want to get the key + # and we only ever want to get the key. + feature_flag_key = models.CharField(max_length=400) + person = models.ForeignKey("Person", on_delete=models.CASCADE) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + hash_key = models.CharField(max_length=400) + class Meta: constraints = [ models.UniqueConstraint( @@ -370,18 +377,14 @@ class Meta: ) ] - # Can't use a foreign key to feature_flag_key directly, since - # the unique constraint is on (team_id+key), and not just key. - # A standard id foreign key leads to INNER JOINs every time we want to get the key - # and we only ever want to get the key. 
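Following the comment above (the table is keyed by team plus flag key, and callers only ever need the hash key), a lookup can stay entirely on this table; a hypothetical query over just the fields declared here, with `team_id`, `flag_key` and `person_id` assumed to be in scope:

from posthog.models.feature_flag.feature_flag import FeatureFlagHashKeyOverride

hash_key = (
    FeatureFlagHashKeyOverride.objects.filter(
        team_id=team_id, feature_flag_key=flag_key, person_id=person_id
    )
    .values_list("hash_key", flat=True)
    .first()
)  # no JOIN back to the feature flag table is needed to resolve the override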
- feature_flag_key: models.CharField = models.CharField(max_length=400) - person: models.ForeignKey = models.ForeignKey("Person", on_delete=models.CASCADE) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - hash_key: models.CharField = models.CharField(max_length=400) - # DEPRECATED: This model is no longer used, but it's not deleted to avoid downtime class FeatureFlagOverride(models.Model): + feature_flag = models.ForeignKey("FeatureFlag", on_delete=models.CASCADE) + user = models.ForeignKey("User", on_delete=models.CASCADE) + override_value = models.JSONField() + team = models.ForeignKey("Team", on_delete=models.CASCADE) + class Meta: constraints = [ models.UniqueConstraint( @@ -390,11 +393,6 @@ class Meta: ) ] - feature_flag: models.ForeignKey = models.ForeignKey("FeatureFlag", on_delete=models.CASCADE) - user: models.ForeignKey = models.ForeignKey("User", on_delete=models.CASCADE) - override_value: models.JSONField = models.JSONField() - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - def set_feature_flags_for_team_in_cache( team_id: int, @@ -443,10 +441,10 @@ def get_feature_flags_for_team_in_cache(team_id: int) -> Optional[list[FeatureFl class FeatureFlagDashboards(models.Model): - feature_flag: models.ForeignKey = models.ForeignKey("FeatureFlag", on_delete=models.CASCADE) - dashboard: models.ForeignKey = models.ForeignKey("Dashboard", on_delete=models.CASCADE) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, null=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True, null=True) + feature_flag = models.ForeignKey("FeatureFlag", on_delete=models.CASCADE) + dashboard = models.ForeignKey("Dashboard", on_delete=models.CASCADE) + created_at = models.DateTimeField(auto_now_add=True, null=True) + updated_at = models.DateTimeField(auto_now=True, null=True) class Meta: constraints = [ diff --git a/posthog/models/feedback/survey.py b/posthog/models/feedback/survey.py index d7d2008868c90..064206e6e92d9 100644 --- a/posthog/models/feedback/survey.py +++ b/posthog/models/feedback/survey.py @@ -22,15 +22,15 @@ class SurveyType(models.TextChoices): class Meta: constraints = [models.UniqueConstraint(fields=["team", "name"], name="unique survey name for team")] - team: models.ForeignKey = models.ForeignKey( + team = models.ForeignKey( "posthog.Team", on_delete=models.CASCADE, related_name="surveys", related_query_name="survey", ) - name: models.CharField = models.CharField(max_length=400) - description: models.TextField = models.TextField(blank=True) - linked_flag: models.ForeignKey = models.ForeignKey( + name = models.CharField(max_length=400) + description = models.TextField(blank=True) + linked_flag = models.ForeignKey( "posthog.FeatureFlag", null=True, blank=True, @@ -38,7 +38,7 @@ class Meta: related_name="surveys_linked_flag", related_query_name="survey_linked_flag", ) - targeting_flag: models.ForeignKey = models.ForeignKey( + targeting_flag = models.ForeignKey( "posthog.FeatureFlag", null=True, blank=True, @@ -46,7 +46,7 @@ class Meta: related_name="surveys_targeting_flag", related_query_name="survey_targeting_flag", ) - internal_targeting_flag: models.ForeignKey = models.ForeignKey( + internal_targeting_flag = models.ForeignKey( "posthog.FeatureFlag", null=True, blank=True, @@ -54,9 +54,9 @@ class Meta: related_name="surveys_internal_targeting_flag", related_query_name="survey_internal_targeting_flag", ) - type: models.CharField = models.CharField(max_length=40, 
choices=SurveyType.choices) - conditions: models.JSONField = models.JSONField(blank=True, null=True) - questions: models.JSONField = models.JSONField( + type = models.CharField(max_length=40, choices=SurveyType.choices) + conditions = models.JSONField(blank=True, null=True) + questions = models.JSONField( blank=True, null=True, help_text=""" @@ -141,19 +141,19 @@ class Meta: ``` """, ) - appearance: models.JSONField = models.JSONField(blank=True, null=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - created_by: models.ForeignKey = models.ForeignKey( + appearance = models.JSONField(blank=True, null=True) + created_at = models.DateTimeField(auto_now_add=True) + created_by = models.ForeignKey( "posthog.User", on_delete=models.SET_NULL, related_name="surveys", related_query_name="survey", null=True, ) - start_date: models.DateTimeField = models.DateTimeField(null=True) - end_date: models.DateTimeField = models.DateTimeField(null=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - archived: models.BooleanField = models.BooleanField(default=False) + start_date = models.DateTimeField(null=True) + end_date = models.DateTimeField(null=True) + updated_at = models.DateTimeField(auto_now=True) + archived = models.BooleanField(default=False) # It's not a strict limit as it's enforced in a periodic task responses_limit = models.PositiveIntegerField(null=True) diff --git a/posthog/models/filters/mixins/property.py b/posthog/models/filters/mixins/property.py index 2d615a7374efa..c8588653a2494 100644 --- a/posthog/models/filters/mixins/property.py +++ b/posthog/models/filters/mixins/property.py @@ -14,38 +14,18 @@ class PropertyMixin(BaseParamMixin): - @cached_property - def old_properties(self) -> list[Property]: - _props = self._data.get(PROPERTIES) - - if isinstance(_props, str): - try: - loaded_props = json.loads(_props) - except json.decoder.JSONDecodeError: - raise ValidationError("Properties are unparsable!") - else: - loaded_props = _props - - # if grouped properties - if (isinstance(loaded_props, dict) and "type" in loaded_props and "values" in loaded_props) or isinstance( - loaded_props, PropertyGroup - ): - # property_groups is main function from now on - # TODO: this function will go away at end of migration - return [] - else: - # old style dict properties or a list of properties - return self._parse_properties(loaded_props) - @cached_property def property_groups(self) -> PropertyGroup: - _props = self._data.get(PROPERTIES) + return self._parse_data(key=PROPERTIES) + + def _parse_data(self, key: str) -> PropertyGroup: + _props = self._data.get(key) if isinstance(_props, str): try: loaded_props = json.loads(_props) except json.decoder.JSONDecodeError: - raise ValidationError("Properties are unparsable!") + raise ValidationError("Data is unparsable!") else: loaded_props = _props @@ -62,7 +42,29 @@ def property_groups(self) -> PropertyGroup: return loaded_props # old properties - return PropertyGroup(type=PropertyOperatorType.AND, values=self.old_properties) + return PropertyGroup(type=PropertyOperatorType.AND, values=self.old_properties(key=key)) + + def old_properties(self, key: str) -> list[Property]: + _props = self._data.get(key) + + if isinstance(_props, str): + try: + loaded_props = json.loads(_props) + except json.decoder.JSONDecodeError: + raise ValidationError("Properties are unparsable!") + else: + loaded_props = _props + + # if grouped properties + if (isinstance(loaded_props, dict) and "type" in loaded_props and 
"values" in loaded_props) or isinstance( + loaded_props, PropertyGroup + ): + # property_groups is main function from now on + # TODO: this function will go away at end of migration + return [] + else: + # old style dict properties or a list of properties + return self._parse_properties(loaded_props) def _parse_properties(self, properties: Optional[Any]) -> list[Property]: if isinstance(properties, list): diff --git a/posthog/models/filters/mixins/session_recordings.py b/posthog/models/filters/mixins/session_recordings.py index d4968a8634852..aad2202dfdafe 100644 --- a/posthog/models/filters/mixins/session_recordings.py +++ b/posthog/models/filters/mixins/session_recordings.py @@ -4,8 +4,9 @@ from posthog.hogql import ast from posthog.constants import PERSON_UUID_FILTER, SESSION_RECORDINGS_FILTER_IDS, PropertyOperatorType from posthog.models.filters.mixins.common import BaseParamMixin +from posthog.models.filters.mixins.property import PropertyMixin from posthog.models.filters.mixins.utils import cached_property -from posthog.models.property import Property +from posthog.models.property import PropertyGroup class PersonUUIDMixin(BaseParamMixin): @@ -14,10 +15,10 @@ def person_uuid(self) -> Optional[str]: return self._data.get(PERSON_UUID_FILTER, None) -class SessionRecordingsMixin(BaseParamMixin): +class SessionRecordingsMixin(PropertyMixin, BaseParamMixin): @cached_property - def console_search_query(self) -> str | None: - return self._data.get("console_search_query", None) + def order(self) -> str: + return self._data.get("order", "start_time") # Supports a legacy use case where events were ORed not ANDed # Can be removed and replaced with ast_operand once the new universal replay filtering is out @@ -27,8 +28,10 @@ def events_operand(self) -> type[Union[ast.And, ast.Or]]: return ast.And if operand == "AND" else ast.Or @cached_property - def _operand(self) -> Literal["AND"] | Literal["OR"]: - return self._data.get("operand", "AND") + def console_log_filters(self) -> PropertyGroup: + property_group = self._parse_data(key="console_log_filters") + property_group.type = self.property_operand + return property_group @cached_property def property_operand(self) -> PropertyOperatorType: @@ -39,36 +42,8 @@ def ast_operand(self) -> type[Union[ast.And, ast.Or]]: return ast.And if self._operand == "AND" else ast.Or @cached_property - def console_logs_filter(self) -> list[Literal["error", "warn", "info"]]: - user_value = self._data.get("console_logs", None) or [] - if isinstance(user_value, str): - user_value = json.loads(user_value) - valid_values = [x for x in user_value if x in ["error", "warn", "info"]] - return valid_values - - @cached_property - def duration_type_filter(self) -> Literal["duration", "active_seconds", "inactive_seconds"]: - user_value = self._data.get("duration_type_filter", None) - if user_value in ["duration", "active_seconds", "inactive_seconds"]: - return user_value - else: - return "duration" - - @cached_property - def recording_duration_filter(self) -> Optional[Property]: - duration_filter_data_str = self._data.get("session_recording_duration", None) - if duration_filter_data_str: - filter_data = json.loads(duration_filter_data_str) - return Property(**filter_data) - return None - - @cached_property - def snapshot_source_filter(self) -> Optional[Property]: - snapshot_source_data_str = self._data.get("snapshot_source", None) - if isinstance(snapshot_source_data_str, str): - filter_data = json.loads(snapshot_source_data_str) - return Property(**filter_data) - return None + 
def _operand(self) -> Literal["AND"] | Literal["OR"]: + return self._data.get("operand", "AND") @cached_property def session_ids(self) -> Optional[list[str]]: @@ -92,3 +67,7 @@ def session_ids(self) -> Optional[list[str]]: # If the property is at all present, we assume that the user wants to filter by it return [] + + @cached_property + def having_predicates(self) -> PropertyGroup: + return self._parse_data(key="having_predicates") diff --git a/posthog/models/group/group.py b/posthog/models/group/group.py index f07f4229988ba..a77981e8def22 100644 --- a/posthog/models/group/group.py +++ b/posthog/models/group/group.py @@ -2,26 +2,26 @@ class Group(models.Model): - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["team_id", "group_key", "group_type_index"], - name="unique team_id/group_key/group_type_index combo", - ) - ] - - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - group_key: models.CharField = models.CharField(max_length=400, null=False, blank=False) - group_type_index: models.IntegerField = models.IntegerField(null=False, blank=False) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + group_key = models.CharField(max_length=400, null=False, blank=False) + group_type_index = models.IntegerField(null=False, blank=False) - group_properties: models.JSONField = models.JSONField(default=dict) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + group_properties = models.JSONField(default=dict) + created_at = models.DateTimeField(auto_now_add=True) # used to prevent race conditions with set and set_once - properties_last_updated_at: models.JSONField = models.JSONField(default=dict) + properties_last_updated_at = models.JSONField(default=dict) # used for evaluating if we need to override the value or not (value: set or set_once) - properties_last_operation: models.JSONField = models.JSONField(default=dict) + properties_last_operation = models.JSONField(default=dict) # current version of the group, used to sync with ClickHouse and collapse rows correctly - version: models.BigIntegerField = models.BigIntegerField(null=False) + version = models.BigIntegerField(null=False) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["team_id", "group_key", "group_type_index"], + name="unique team_id/group_key/group_type_index combo", + ) + ] diff --git a/posthog/models/group_type_mapping.py b/posthog/models/group_type_mapping.py index ed4a19164f4fb..8dcb9cd74f1fb 100644 --- a/posthog/models/group_type_mapping.py +++ b/posthog/models/group_type_mapping.py @@ -4,6 +4,13 @@ # This table is responsible for mapping between group types for a Team/Project and event columns # to add group keys class GroupTypeMapping(models.Model): + team = models.ForeignKey("Team", on_delete=models.CASCADE) + group_type = models.CharField(max_length=400, null=False, blank=False) + group_type_index = models.IntegerField(null=False, blank=False) + # Used to display in UI + name_singular = models.CharField(max_length=400, null=True, blank=True) + name_plural = models.CharField(max_length=400, null=True, blank=True) + class Meta: constraints = [ models.UniqueConstraint(fields=["team", "group_type"], name="unique group types for team"), @@ -16,10 +23,3 @@ class Meta: name="group_type_index is less than or equal 5", ), ] - - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - group_type: models.CharField = models.CharField(max_length=400, null=False, blank=False) - group_type_index: 
models.IntegerField = models.IntegerField(null=False, blank=False) - # Used to display in UI - name_singular: models.CharField = models.CharField(max_length=400, null=True, blank=True) - name_plural: models.CharField = models.CharField(max_length=400, null=True, blank=True) diff --git a/posthog/models/hog_functions/hog_function.py b/posthog/models/hog_functions/hog_function.py index ca6bb1f236105..529e822c86fd2 100644 --- a/posthog/models/hog_functions/hog_function.py +++ b/posthog/models/hog_functions/hog_function.py @@ -31,23 +31,23 @@ class HogFunctionState(enum.Enum): class HogFunction(UUIDModel): - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - name: models.CharField = models.CharField(max_length=400, null=True, blank=True) - description: models.TextField = models.TextField(blank=True, default="") - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) - deleted: models.BooleanField = models.BooleanField(default=False) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - enabled: models.BooleanField = models.BooleanField(default=False) - - icon_url: models.TextField = models.TextField(null=True, blank=True) - hog: models.TextField = models.TextField() - bytecode: models.JSONField = models.JSONField(null=True, blank=True) - inputs_schema: models.JSONField = models.JSONField(null=True) - inputs: models.JSONField = models.JSONField(null=True) - filters: models.JSONField = models.JSONField(null=True, blank=True) - masking: models.JSONField = models.JSONField(null=True, blank=True) - template_id: models.CharField = models.CharField(max_length=400, null=True, blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + name = models.CharField(max_length=400, null=True, blank=True) + description = models.TextField(blank=True, default="") + created_at = models.DateTimeField(auto_now_add=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + deleted = models.BooleanField(default=False) + updated_at = models.DateTimeField(auto_now=True) + enabled = models.BooleanField(default=False) + + icon_url = models.TextField(null=True, blank=True) + hog = models.TextField() + bytecode = models.JSONField(null=True, blank=True) + inputs_schema = models.JSONField(null=True) + inputs = models.JSONField(null=True) + filters = models.JSONField(null=True, blank=True) + masking = models.JSONField(null=True, blank=True) + template_id = models.CharField(max_length=400, null=True, blank=True) @property def template(self) -> Optional[HogFunctionTemplate]: diff --git a/posthog/models/insight.py b/posthog/models/insight.py index 2fcb7213dd618..e73c44d90dd2d 100644 --- a/posthog/models/insight.py +++ b/posthog/models/insight.py @@ -29,27 +29,27 @@ class Insight(models.Model): reports or part of a dashboard. 
""" - name: models.CharField = models.CharField(max_length=400, null=True, blank=True) - derived_name: models.CharField = models.CharField(max_length=400, null=True, blank=True) - description: models.CharField = models.CharField(max_length=400, null=True, blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - filters: models.JSONField = models.JSONField(default=dict) - filters_hash: models.CharField = models.CharField(max_length=400, null=True, blank=True) - query: models.JSONField = models.JSONField(null=True, blank=True) - order: models.IntegerField = models.IntegerField(null=True, blank=True) - deleted: models.BooleanField = models.BooleanField(default=False) - saved: models.BooleanField = models.BooleanField(default=False) - created_at: models.DateTimeField = models.DateTimeField(null=True, blank=True, auto_now_add=True) - refreshing: models.BooleanField = models.BooleanField(default=False) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + name = models.CharField(max_length=400, null=True, blank=True) + derived_name = models.CharField(max_length=400, null=True, blank=True) + description = models.CharField(max_length=400, null=True, blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + filters = models.JSONField(default=dict) + filters_hash = models.CharField(max_length=400, null=True, blank=True) + query = models.JSONField(null=True, blank=True) + order = models.IntegerField(null=True, blank=True) + deleted = models.BooleanField(default=False) + saved = models.BooleanField(default=False) + created_at = models.DateTimeField(null=True, blank=True, auto_now_add=True) + refreshing = models.BooleanField(default=False) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) # Indicates if it's a sample graph generated by dashboard templates - is_sample: models.BooleanField = models.BooleanField(default=False) + is_sample = models.BooleanField(default=False) # Unique ID per team for easy sharing and short links - short_id: models.CharField = models.CharField(max_length=12, blank=True, default=generate_short_id) - favorited: models.BooleanField = models.BooleanField(default=False) - refresh_attempt: models.IntegerField = models.IntegerField(null=True, blank=True) - last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now) - last_modified_by: models.ForeignKey = models.ForeignKey( + short_id = models.CharField(max_length=12, blank=True, default=generate_short_id) + favorited = models.BooleanField(default=False) + refresh_attempt = models.IntegerField(null=True, blank=True) + last_modified_at = models.DateTimeField(default=timezone.now) + last_modified_by = models.ForeignKey( "User", on_delete=models.SET_NULL, null=True, @@ -58,7 +58,7 @@ class Insight(models.Model): ) # DEPRECATED: using the new "dashboards" relation instead - dashboard: models.ForeignKey = models.ForeignKey( + dashboard = models.ForeignKey( "Dashboard", related_name="items", on_delete=models.CASCADE, @@ -66,19 +66,19 @@ class Insight(models.Model): blank=True, ) # DEPRECATED: within cached results package now - last_refresh: models.DateTimeField = models.DateTimeField(blank=True, null=True) + last_refresh = models.DateTimeField(blank=True, null=True) # DEPRECATED: on dashboard_insight now - layouts: models.JSONField = models.JSONField(default=dict) + layouts = models.JSONField(default=dict) # DEPRECATED: on dashboard_insight now - color: 
models.CharField = models.CharField(max_length=400, null=True, blank=True) + color = models.CharField(max_length=400, null=True, blank=True) # DEPRECATED: dive dashboards were never shipped - dive_dashboard: models.ForeignKey = models.ForeignKey("Dashboard", on_delete=models.SET_NULL, null=True, blank=True) + dive_dashboard = models.ForeignKey("Dashboard", on_delete=models.SET_NULL, null=True, blank=True) # DEPRECATED: in practically all cases field `last_modified_at` should be used instead - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + updated_at = models.DateTimeField(auto_now=True) # DEPRECATED: use `display` property of the Filter object instead - type: models.CharField = deprecate_field(models.CharField(max_length=400, null=True, blank=True)) + type = deprecate_field(models.CharField(max_length=400, null=True, blank=True)) # DEPRECATED: we don't store funnels as a separate model any more - funnel: models.IntegerField = deprecate_field(models.IntegerField(null=True, blank=True)) + funnel = deprecate_field(models.IntegerField(null=True, blank=True)) # DEPRECATED: now using app-wide tagging model. See EnterpriseTaggedItem deprecated_tags: ArrayField = ArrayField(models.CharField(max_length=32), null=True, blank=True, default=list) # DEPRECATED: now using app-wide tagging model. See EnterpriseTaggedItem @@ -124,14 +124,22 @@ def query_from_filters(self): from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query try: - return {"kind": "InsightVizNode", "source": filter_to_query(self.filters).model_dump(), "full": True} + return { + "kind": "InsightVizNode", + "source": filter_to_query(self.filters).model_dump(exclude_none=True), + "full": True, + } except Exception as e: capture_exception(e) - def dashboard_filters(self, dashboard: Optional[Dashboard] = None): + def dashboard_filters( + self, dashboard: Optional[Dashboard] = None, dashboard_filters_override: Optional[dict] = None + ): # query date range is set in a different function, see dashboard_query if dashboard and not self.query: - dashboard_filters = {**dashboard.filters} + dashboard_filters = { + **(dashboard_filters_override if dashboard_filters_override is not None else dashboard.filters) + } dashboard_properties = dashboard_filters.pop("properties") if dashboard_filters.get("properties") else None insight_date_from = self.filters.get("date_from", None) insight_date_to = self.filters.get("date_to", None) @@ -181,13 +189,19 @@ def dashboard_filters(self, dashboard: Optional[Dashboard] = None): else: return self.filters - def get_effective_query(self, *, dashboard: Optional[Dashboard]) -> Optional[dict]: + def get_effective_query( + self, *, dashboard: Optional[Dashboard], dashboard_filters_override: Optional[dict] = None + ) -> Optional[dict]: from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_filters_to_dict if not dashboard or not self.query: return self.query - return apply_dashboard_filters_to_dict(self.query, dashboard.filters, self.team) + return apply_dashboard_filters_to_dict( + self.query, + dashboard_filters_override if dashboard_filters_override is not None else dashboard.filters, + self.team, + ) @property def url(self): @@ -195,15 +209,17 @@ def url(self): class InsightViewed(models.Model): - class Meta: - constraints = [models.UniqueConstraint(fields=["team", "user", "insight"], name="posthog_unique_insightviewed")] - indexes = [models.Index(fields=["team_id", "user_id", "-last_viewed_at"])] + # To track views from shared insights, team 
and user can be null + team = models.ForeignKey("Team", on_delete=models.CASCADE, null=True, blank=True) + user = models.ForeignKey("User", on_delete=models.CASCADE, null=True, blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - user: models.ForeignKey = models.ForeignKey("User", on_delete=models.CASCADE) insight: models.ForeignKey = models.ForeignKey(Insight, on_delete=models.CASCADE) last_viewed_at: models.DateTimeField = models.DateTimeField() + class Meta: + constraints = [models.UniqueConstraint(fields=["team", "user", "insight"], name="posthog_unique_insightviewed")] + indexes = [models.Index(fields=["team_id", "user_id", "-last_viewed_at"])] + @timed("generate_insight_cache_key") def generate_insight_filters_hash(insight: Insight, dashboard: Optional[Dashboard]) -> str: diff --git a/posthog/models/insight_caching_state.py b/posthog/models/insight_caching_state.py index 6ee767bc520e2..ab6c1c9d67d5b 100644 --- a/posthog/models/insight_caching_state.py +++ b/posthog/models/insight_caching_state.py @@ -21,7 +21,7 @@ class Meta: ) ] - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) + team = models.ForeignKey(Team, on_delete=models.CASCADE) insight = models.ForeignKey( "posthog.Insight", @@ -35,16 +35,16 @@ class Meta: related_name="caching_states", null=True, ) - cache_key: models.CharField = models.CharField(max_length=400, null=False, blank=False) + cache_key = models.CharField(max_length=400, null=False, blank=False) - target_cache_age_seconds: models.IntegerField = models.IntegerField(null=True) + target_cache_age_seconds = models.IntegerField(null=True) - last_refresh: models.DateTimeField = models.DateTimeField(blank=True, null=True) - last_refresh_queued_at: models.DateTimeField = models.DateTimeField(blank=True, null=True) - refresh_attempt: models.IntegerField = models.IntegerField(null=False, default=0) + last_refresh = models.DateTimeField(blank=True, null=True) + last_refresh_queued_at = models.DateTimeField(blank=True, null=True) + refresh_attempt = models.IntegerField(null=False, default=0) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) @mutable_receiver(post_save, sender=SharingConfiguration) diff --git a/posthog/models/instance_setting.py b/posthog/models/instance_setting.py index e03e72535c9ae..4858d39e05a91 100644 --- a/posthog/models/instance_setting.py +++ b/posthog/models/instance_setting.py @@ -9,12 +9,12 @@ class InstanceSetting(models.Model): + key = models.CharField(max_length=128, null=False, blank=False) + raw_value = models.CharField(max_length=1024, null=False, blank=True) + class Meta: constraints = [models.UniqueConstraint(fields=["key"], name="unique key")] - key: models.CharField = models.CharField(max_length=128, null=False, blank=False) - raw_value: models.CharField = models.CharField(max_length=1024, null=False, blank=True) - @property def value(self): return json.loads(self.raw_value) diff --git a/posthog/models/integration.py b/posthog/models/integration.py index 38cac084e200a..c613bd5aa70c8 100644 --- a/posthog/models/integration.py +++ b/posthog/models/integration.py @@ -40,29 +40,29 @@ class IntegrationKind(models.TextChoices): SALESFORCE = "salesforce" HUBSPOT = "hubspot" - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["team", "kind", 
"integration_id"], name="posthog_integration_kind_id_unique" - ) - ] - - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) + team = models.ForeignKey("Team", on_delete=models.CASCADE) # The integration type identifier - kind: models.CharField = models.CharField(max_length=10, choices=IntegrationKind.choices) + kind = models.CharField(max_length=10, choices=IntegrationKind.choices) # The ID of the integration in the external system - integration_id: models.TextField = models.TextField(null=True, blank=True) + integration_id = models.TextField(null=True, blank=True) # Any config that COULD be passed to the frontend - config: models.JSONField = models.JSONField(default=dict) + config = models.JSONField(default=dict) # Any sensitive config that SHOULD NOT be passed to the frontend - sensitive_config: models.JSONField = models.JSONField(default=dict) + sensitive_config = models.JSONField(default=dict) - errors: models.TextField = models.TextField() + errors = models.TextField() # Meta - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["team", "kind", "integration_id"], name="posthog_integration_kind_id_unique" + ) + ] @property def display_name(self) -> str: @@ -322,7 +322,7 @@ def list_channels(self) -> list[dict]: return sorted(channels, key=lambda x: x["name"]) def _list_channels_by_type(self, type: Literal["public_channel", "private_channel"]) -> list[dict]: - max_page = 10 + max_page = 20 channels = [] cursor = None diff --git a/posthog/models/messaging.py b/posthog/models/messaging.py index 5514f98baccb2..1f013ce09f75d 100644 --- a/posthog/models/messaging.py +++ b/posthog/models/messaging.py @@ -23,12 +23,12 @@ def get_or_create(self, defaults=None, **kwargs): class MessagingRecord(UUIDModel): objects = MessagingRecordManager() - email_hash: models.CharField = models.CharField(max_length=1024) - campaign_key: models.CharField = models.CharField(max_length=128) + email_hash = models.CharField(max_length=1024) + campaign_key = models.CharField(max_length=128) # Numeric indicator for repeat emails of the same campaign key - campaign_count: models.IntegerField = models.IntegerField(null=True) - sent_at: models.DateTimeField = models.DateTimeField(null=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + campaign_count = models.IntegerField(null=True) + sent_at = models.DateTimeField(null=True) + created_at = models.DateTimeField(auto_now_add=True) class Meta: unique_together = ( diff --git a/posthog/models/notebook/notebook.py b/posthog/models/notebook/notebook.py index ec61ab1c22ed0..48553a684cb65 100644 --- a/posthog/models/notebook/notebook.py +++ b/posthog/models/notebook/notebook.py @@ -8,17 +8,17 @@ class Notebook(UUIDModel): - short_id: models.CharField = models.CharField(max_length=12, blank=True, default=generate_short_id) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - title: models.CharField = models.CharField(max_length=256, blank=True, null=True) + short_id = models.CharField(max_length=12, blank=True, default=generate_short_id) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + title = 
models.CharField(max_length=256, blank=True, null=True) content: JSONField = JSONField(default=None, null=True, blank=True) - text_content: models.TextField = models.TextField(blank=True, null=True) - deleted: models.BooleanField = models.BooleanField(default=False) - version: models.IntegerField = models.IntegerField(default=0) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) - last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now) - last_modified_by: models.ForeignKey = models.ForeignKey( + text_content = models.TextField(blank=True, null=True) + deleted = models.BooleanField(default=False) + version = models.IntegerField(default=0) + created_at = models.DateTimeField(auto_now_add=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + last_modified_at = models.DateTimeField(default=timezone.now) + last_modified_by = models.ForeignKey( "User", on_delete=models.SET_NULL, null=True, diff --git a/posthog/models/organization.py b/posthog/models/organization.py index 5f146fc8b45a2..e64f45ff8abc4 100644 --- a/posthog/models/organization.py +++ b/posthog/models/organization.py @@ -112,31 +112,29 @@ class PluginsAccessLevel(models.IntegerChoices): # This includes installing plugins from the repository and managing plugin installations for all other orgs. ROOT = 9, "root" - members: models.ManyToManyField = models.ManyToManyField( + members = models.ManyToManyField( "posthog.User", through="posthog.OrganizationMembership", related_name="organizations", related_query_name="organization", ) - name: models.CharField = models.CharField(max_length=64) + name = models.CharField(max_length=64) slug: LowercaseSlugField = LowercaseSlugField(unique=True, max_length=MAX_SLUG_LENGTH) - logo_media: models.ForeignKey = models.ForeignKey( - "posthog.UploadedMedia", on_delete=models.SET_NULL, null=True, blank=True - ) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - plugins_access_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( + logo_media = models.ForeignKey("posthog.UploadedMedia", on_delete=models.SET_NULL, null=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + plugins_access_level = models.PositiveSmallIntegerField( default=PluginsAccessLevel.CONFIG, choices=PluginsAccessLevel.choices, ) - for_internal_metrics: models.BooleanField = models.BooleanField(default=False) - is_member_join_email_enabled: models.BooleanField = models.BooleanField(default=True) - enforce_2fa: models.BooleanField = models.BooleanField(null=True, blank=True) + for_internal_metrics = models.BooleanField(default=False) + is_member_join_email_enabled = models.BooleanField(default=True) + enforce_2fa = models.BooleanField(null=True, blank=True) - is_hipaa: models.BooleanField = models.BooleanField(default=False, null=True, blank=True) + is_hipaa = models.BooleanField(default=False, null=True, blank=True) ## Managed by Billing - customer_id: models.CharField = models.CharField(max_length=200, null=True, blank=True) + customer_id = models.CharField(max_length=200, null=True, blank=True) available_product_features = ArrayField(models.JSONField(blank=False), null=True, blank=True) # Managed by Billing, cached 
here for usage controls # Like { @@ -145,14 +143,14 @@ class PluginsAccessLevel(models.IntegerChoices): # 'period': ['2021-01-01', '2021-01-31'] # } # Also currently indicates if the organization is on billing V2 or not - usage: models.JSONField = models.JSONField(null=True, blank=True) - never_drop_data: models.BooleanField = models.BooleanField(default=False, null=True, blank=True) + usage = models.JSONField(null=True, blank=True) + never_drop_data = models.BooleanField(default=False, null=True, blank=True) # Scoring levels defined in billing::customer::TrustScores - customer_trust_scores: models.JSONField = models.JSONField(default=dict, null=True, blank=True) + customer_trust_scores = models.JSONField(default=dict, null=True, blank=True) # DEPRECATED attributes (should be removed on next major version) - setup_section_2_completed: models.BooleanField = models.BooleanField(default=True) - personalization: models.JSONField = models.JSONField(default=dict, null=False, blank=True) + setup_section_2_completed = models.BooleanField(default=True) + personalization = models.JSONField(default=dict, null=False, blank=True) domain_whitelist: ArrayField = ArrayField( models.CharField(max_length=256, blank=False), blank=True, default=list ) # DEPRECATED in favor of `OrganizationDomain` model; previously used to allow self-serve account creation based on social login (#5111) @@ -260,23 +258,21 @@ class Level(models.IntegerChoices): ADMIN = 8, "administrator" OWNER = 15, "owner" - organization: models.ForeignKey = models.ForeignKey( + organization = models.ForeignKey( "posthog.Organization", on_delete=models.CASCADE, related_name="memberships", related_query_name="membership", ) - user: models.ForeignKey = models.ForeignKey( + user = models.ForeignKey( "posthog.User", on_delete=models.CASCADE, related_name="organization_memberships", related_query_name="organization_membership", ) - level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( - default=Level.MEMBER, choices=Level.choices - ) - joined_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + level = models.PositiveSmallIntegerField(default=Level.MEMBER, choices=Level.choices) + joined_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) class Meta: constraints = [ diff --git a/posthog/models/organization_domain.py b/posthog/models/organization_domain.py index d3b7138d6a56f..0766cd25461b3 100644 --- a/posthog/models/organization_domain.py +++ b/posthog/models/organization_domain.py @@ -117,26 +117,26 @@ def get_sso_enforcement_for_email_address(self, email: str) -> Optional[str]: class OrganizationDomain(UUIDModel): objects: OrganizationDomainManager = OrganizationDomainManager() - organization: models.ForeignKey = models.ForeignKey(Organization, on_delete=models.CASCADE, related_name="domains") - domain: models.CharField = models.CharField(max_length=128, unique=True) - verification_challenge: models.CharField = models.CharField(max_length=128, default=generate_verification_challenge) - verified_at: models.DateTimeField = models.DateTimeField( + organization = models.ForeignKey(Organization, on_delete=models.CASCADE, related_name="domains") + domain = models.CharField(max_length=128, unique=True) + verification_challenge = models.CharField(max_length=128, default=generate_verification_challenge) + verified_at = models.DateTimeField( null=True, blank=True, default=None ) # verification (through 
DNS) is only used for PostHog Cloud; on self-hosted we take all domains as verified - last_verification_retry: models.DateTimeField = models.DateTimeField(null=True, blank=True, default=None) - jit_provisioning_enabled: models.BooleanField = models.BooleanField( + last_verification_retry = models.DateTimeField(null=True, blank=True, default=None) + jit_provisioning_enabled = models.BooleanField( default=False ) # Just-in-time automatic provisioning (user accounts are created on the respective org when logging in with any SSO provider) - sso_enforcement: models.CharField = models.CharField( + sso_enforcement = models.CharField( max_length=28, blank=True ) # currently only used for PostHog Cloud; SSO enforcement on self-hosted is set by env var # ---- SAML attributes ---- # Normally not good practice to have `null=True` in `CharField` (as you have to nil states now), but creating non-nullable # attributes locks up tables when migrating. Remove `null=True` on next major release. - saml_entity_id: models.CharField = models.CharField(max_length=512, blank=True, null=True) - saml_acs_url: models.CharField = models.CharField(max_length=512, blank=True, null=True) - saml_x509_cert: models.TextField = models.TextField(blank=True, null=True) + saml_entity_id = models.CharField(max_length=512, blank=True, null=True) + saml_acs_url = models.CharField(max_length=512, blank=True, null=True) + saml_x509_cert = models.TextField(blank=True, null=True) class Meta: verbose_name = "domain" diff --git a/posthog/models/organization_invite.py b/posthog/models/organization_invite.py index be0c30dd6bae9..fc45084626a7d 100644 --- a/posthog/models/organization_invite.py +++ b/posthog/models/organization_invite.py @@ -36,29 +36,29 @@ def validate_private_project_access(value): class OrganizationInvite(UUIDModel): - organization: models.ForeignKey = models.ForeignKey( + organization = models.ForeignKey( "posthog.Organization", on_delete=models.CASCADE, related_name="invites", related_query_name="invite", ) - target_email: models.EmailField = models.EmailField(null=True, db_index=True) - first_name: models.CharField = models.CharField(max_length=30, blank=True, default="") - created_by: models.ForeignKey = models.ForeignKey( + target_email = models.EmailField(null=True, db_index=True) + first_name = models.CharField(max_length=30, blank=True, default="") + created_by = models.ForeignKey( "posthog.User", on_delete=models.SET_NULL, related_name="organization_invites", related_query_name="organization_invite", null=True, ) - emailing_attempt_made: models.BooleanField = models.BooleanField(default=False) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - message: models.TextField = models.TextField(blank=True, null=True) - level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( + emailing_attempt_made = models.BooleanField(default=False) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + message = models.TextField(blank=True, null=True) + level = models.PositiveSmallIntegerField( default=OrganizationMembership.Level.MEMBER, choices=OrganizationMembership.Level.choices ) - private_project_access: models.JSONField = models.JSONField( + private_project_access = models.JSONField( default=list, null=True, blank=True, diff --git a/posthog/models/person/person.py b/posthog/models/person/person.py index b1e4b4b626a99..f704e14fa4499 100644 --- 
a/posthog/models/person/person.py +++ b/posthog/models/person/person.py @@ -29,22 +29,22 @@ def distinct_ids_exist(team_id: int, distinct_ids: list[str]) -> bool: class Person(models.Model): _distinct_ids: Optional[list[str]] - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True, blank=True) # used to prevent race conditions with set and set_once - properties_last_updated_at: models.JSONField = models.JSONField(default=dict, null=True, blank=True) + properties_last_updated_at = models.JSONField(default=dict, null=True, blank=True) # used for evaluating if we need to override the value or not (value: set or set_once) - properties_last_operation: models.JSONField = models.JSONField(null=True, blank=True) + properties_last_operation = models.JSONField(null=True, blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - properties: models.JSONField = models.JSONField(default=dict) - is_user: models.ForeignKey = models.ForeignKey("User", on_delete=models.CASCADE, null=True, blank=True) - is_identified: models.BooleanField = models.BooleanField(default=False) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + properties = models.JSONField(default=dict) + is_user = models.ForeignKey("User", on_delete=models.CASCADE, null=True, blank=True) + is_identified = models.BooleanField(default=False) uuid = models.UUIDField(db_index=True, default=UUIDT, editable=False) # current version of the person, used to sync with ClickHouse and collapse rows correctly - version: models.BigIntegerField = models.BigIntegerField(null=True, blank=True) + version = models.BigIntegerField(null=True, blank=True) # Has an index on properties -> email from migration 0121, (team_id, id DESC) from migration 0164 @@ -118,29 +118,29 @@ def split_person(self, main_distinct_id: Optional[str], max_splits: Optional[int class PersonDistinctId(models.Model): - class Meta: - constraints = [models.UniqueConstraint(fields=["team", "distinct_id"], name="unique distinct_id for team")] - - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, db_index=False) - person: models.ForeignKey = models.ForeignKey(Person, on_delete=models.CASCADE) - distinct_id: models.CharField = models.CharField(max_length=400) + team = models.ForeignKey("Team", on_delete=models.CASCADE, db_index=False) + person = models.ForeignKey(Person, on_delete=models.CASCADE) + distinct_id = models.CharField(max_length=400) # current version of the id, used to sync with ClickHouse and collapse rows correctly for new clickhouse table - version: models.BigIntegerField = models.BigIntegerField(null=True, blank=True) + version = models.BigIntegerField(null=True, blank=True) + + class Meta: + constraints = [models.UniqueConstraint(fields=["team", "distinct_id"], name="unique distinct_id for team")] class PersonlessDistinctId(models.Model): + id = models.BigAutoField(primary_key=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE, db_index=False) + distinct_id = models.CharField(max_length=400) + is_merged = models.BooleanField(default=False) + created_at = models.DateTimeField(auto_now_add=True, blank=True) + class Meta: constraints = [ models.UniqueConstraint(fields=["team", "distinct_id"], name="unique personless distinct_id for team") ] - id: models.BigAutoField = models.BigAutoField(primary_key=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, db_index=False) - 
distinct_id: models.CharField = models.CharField(max_length=400) - is_merged: models.BooleanField = models.BooleanField(default=False) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - class PersonOverrideMapping(models.Model): """A model of persons to be overridden in merge or merge-like events.""" @@ -169,23 +169,23 @@ class PersonOverride(models.Model): """ id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID") - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) + team = models.ForeignKey("Team", on_delete=models.CASCADE) - old_person_id: models.ForeignKey = models.ForeignKey( + old_person_id = models.ForeignKey( "PersonOverrideMapping", db_column="old_person_id", related_name="person_override_old", on_delete=models.CASCADE, ) - override_person_id: models.ForeignKey = models.ForeignKey( + override_person_id = models.ForeignKey( "PersonOverrideMapping", db_column="override_person_id", related_name="person_override_override", on_delete=models.CASCADE, ) - oldest_event: models.DateTimeField = models.DateTimeField() - version: models.BigIntegerField = models.BigIntegerField(null=True, blank=True) + oldest_event = models.DateTimeField() + version = models.BigIntegerField(null=True, blank=True) class Meta: constraints = [ diff --git a/posthog/models/personal_api_key.py b/posthog/models/personal_api_key.py index fe23d8f66fdff..ea886b55757c6 100644 --- a/posthog/models/personal_api_key.py +++ b/posthog/models/personal_api_key.py @@ -41,19 +41,19 @@ def mask_key_value(value: str) -> str: class PersonalAPIKey(models.Model): - id: models.CharField = models.CharField(primary_key=True, max_length=50, default=generate_random_token) + id = models.CharField(primary_key=True, max_length=50, default=generate_random_token) user = models.ForeignKey("posthog.User", on_delete=models.CASCADE, related_name="personal_api_keys") - label: models.CharField = models.CharField(max_length=40) - value: models.CharField = models.CharField(unique=True, max_length=50, editable=False, null=True, blank=True) - mask_value: models.CharField = models.CharField(max_length=11, editable=False, null=True) - secure_value: models.CharField = models.CharField( + label = models.CharField(max_length=40) + value = models.CharField(unique=True, max_length=50, editable=False, null=True, blank=True) + mask_value = models.CharField(max_length=11, editable=False, null=True) + secure_value = models.CharField( unique=True, max_length=300, null=True, editable=False, ) - created_at: models.DateTimeField = models.DateTimeField(default=timezone.now) - last_used_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) + created_at = models.DateTimeField(default=timezone.now) + last_used_at = models.DateTimeField(null=True, blank=True) scopes: ArrayField = ArrayField(models.CharField(max_length=100), null=True) scoped_teams: ArrayField = ArrayField(models.IntegerField(), null=True) scoped_organizations: ArrayField = ArrayField(models.CharField(max_length=100), null=True) diff --git a/posthog/models/plugin.py b/posthog/models/plugin.py index 19d07578cf4a5..9fe174a9dbe68 100644 --- a/posthog/models/plugin.py +++ b/posthog/models/plugin.py @@ -151,49 +151,47 @@ class PluginType(models.TextChoices): ) # Code checked into plugin_server, url starts with "inline:" # DEPRECATED: plugin-server will own all plugin code, org relations don't make sense - organization: models.ForeignKey = models.ForeignKey( + organization =
models.ForeignKey( "posthog.Organization", on_delete=models.CASCADE, related_name="plugins", related_query_name="plugin", null=True, ) - plugin_type: models.CharField = models.CharField( - max_length=200, null=True, blank=True, choices=PluginType.choices, default=None - ) - is_global: models.BooleanField = models.BooleanField(default=False) # Whether plugin is installed for all orgs - is_preinstalled: models.BooleanField = models.BooleanField(default=False) - is_stateless: models.BooleanField = models.BooleanField( + plugin_type = models.CharField(max_length=200, null=True, blank=True, choices=PluginType.choices, default=None) + is_global = models.BooleanField(default=False) # Whether plugin is installed for all orgs + is_preinstalled = models.BooleanField(default=False) + is_stateless = models.BooleanField( default=False, null=True, blank=True ) # Whether plugin can run one VM across teams - name: models.CharField = models.CharField(max_length=200, null=True, blank=True) - description: models.TextField = models.TextField(null=True, blank=True) - url: models.CharField = models.CharField(max_length=800, null=True, blank=True, unique=True) - icon: models.CharField = models.CharField(max_length=800, null=True, blank=True) + name = models.CharField(max_length=200, null=True, blank=True) + description = models.TextField(null=True, blank=True) + url = models.CharField(max_length=800, null=True, blank=True, unique=True) + icon = models.CharField(max_length=800, null=True, blank=True) # Describe the fields to ask in the interface; store answers in PluginConfig->config # - config_schema = { [fieldKey]: { name: 'api key', type: 'string', default: '', required: true } } - config_schema: models.JSONField = models.JSONField(default=dict, blank=True) - tag: models.CharField = models.CharField(max_length=200, null=True, blank=True) - archive: models.BinaryField = models.BinaryField(blank=True, null=True) - latest_tag: models.CharField = models.CharField(max_length=800, null=True, blank=True) - latest_tag_checked_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) - capabilities: models.JSONField = models.JSONField(default=dict) - metrics: models.JSONField = models.JSONField(default=dict, null=True, blank=True) - public_jobs: models.JSONField = models.JSONField(default=dict, null=True, blank=True) + config_schema = models.JSONField(default=dict, blank=True) + tag = models.CharField(max_length=200, null=True, blank=True) + archive = models.BinaryField(blank=True, null=True) + latest_tag = models.CharField(max_length=800, null=True, blank=True) + latest_tag_checked_at = models.DateTimeField(null=True, blank=True) + capabilities = models.JSONField(default=dict) + metrics = models.JSONField(default=dict, null=True, blank=True) + public_jobs = models.JSONField(default=dict, null=True, blank=True) # DEPRECATED: not used for anything, all install and config errors are in PluginConfig.error - error: models.JSONField = models.JSONField(default=None, null=True, blank=True) + error = models.JSONField(default=None, null=True, blank=True) # DEPRECATED: this was used when syncing posthog.json with the db on app start - from_json: models.BooleanField = models.BooleanField(default=False) + from_json = models.BooleanField(default=False) # DEPRECATED: this was used when syncing posthog.json with the db on app start - from_web: models.BooleanField = models.BooleanField(default=False) + from_web = models.BooleanField(default=False) # DEPRECATED: using PluginSourceFile model instead - source: 
models.TextField = models.TextField(blank=True, null=True) + source = models.TextField(blank=True, null=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) - log_level: models.IntegerField = models.IntegerField(null=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(null=True, blank=True) + log_level = models.IntegerField(null=True, blank=True) # Some plugins are private, only certain organizations should be able to access them # Sometimes we want to deprecate plugins, where the first step is limiting access to organizations using them @@ -226,29 +224,29 @@ def get_default_config(self) -> dict[str, Any]: class PluginConfig(models.Model): - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, null=True) - plugin: models.ForeignKey = models.ForeignKey("Plugin", on_delete=models.CASCADE) - enabled: models.BooleanField = models.BooleanField(default=False) - order: models.IntegerField = models.IntegerField() - config: models.JSONField = models.JSONField(default=dict) + team = models.ForeignKey("Team", on_delete=models.CASCADE, null=True) + plugin = models.ForeignKey("Plugin", on_delete=models.CASCADE) + enabled = models.BooleanField(default=False) + order = models.IntegerField() + config = models.JSONField(default=dict) # DEPRECATED: use `plugin_log_entries` or `app_metrics` in ClickHouse instead # Error when running this plugin on an event (frontend: PluginErrorType) # - e.g: "undefined is not a function on index.js line 23" # - error = { message: "Exception in processEvent()", time: "iso-string", ...meta } - error: models.JSONField = models.JSONField(default=None, null=True, blank=True) + error = models.JSONField(default=None, null=True, blank=True) # Used to access site.ts from a public URL - web_token: models.CharField = models.CharField(max_length=64, default=None, null=True) + web_token = models.CharField(max_length=64, default=None, null=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) # Used in the frontend - name: models.CharField = models.CharField(max_length=400, null=True, blank=True) - description: models.CharField = models.CharField(max_length=1000, null=True, blank=True) + name = models.CharField(max_length=400, null=True, blank=True) + description = models.CharField(max_length=1000, null=True, blank=True) # Used in the frontend to hide pluginConfigs that user deleted - deleted: models.BooleanField = models.BooleanField(default=False, null=True) + deleted = models.BooleanField(default=False, null=True) # If set we will filter the plugin triggers for this event - filters: models.JSONField = models.JSONField(null=True, blank=True) + filters = models.JSONField(null=True, blank=True) # DEPRECATED - this never actually got used - filters is the way to go match_action = models.ForeignKey( @@ -267,16 +265,20 @@ class Meta: class PluginAttachment(models.Model): - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, null=True) - plugin_config: models.ForeignKey = models.ForeignKey("PluginConfig", on_delete=models.CASCADE, null=True) - key: models.CharField = models.CharField(max_length=200) - content_type: models.CharField = models.CharField(max_length=200) - 
file_name: models.CharField = models.CharField(max_length=200) - file_size: models.IntegerField = models.IntegerField() - contents: models.BinaryField = models.BinaryField() + team = models.ForeignKey("Team", on_delete=models.CASCADE, null=True) + plugin_config = models.ForeignKey("PluginConfig", on_delete=models.CASCADE, null=True) + key = models.CharField(max_length=200) + content_type = models.CharField(max_length=200) + file_name = models.CharField(max_length=200) + file_size = models.IntegerField() + contents = models.BinaryField() class PluginStorage(models.Model): + plugin_config = models.ForeignKey("PluginConfig", on_delete=models.CASCADE) + key = models.CharField(max_length=200) + value = models.TextField(blank=True, null=True) + class Meta: constraints = [ models.UniqueConstraint( @@ -285,10 +287,6 @@ class Meta: ) ] - plugin_config: models.ForeignKey = models.ForeignKey("PluginConfig", on_delete=models.CASCADE) - key: models.CharField = models.CharField(max_length=200) - value: models.TextField = models.TextField(blank=True, null=True) - class PluginLogEntrySource(StrEnum): SYSTEM = "SYSTEM" @@ -401,14 +399,14 @@ class Status(models.TextChoices): TRANSPILED = "TRANSPILED", "transpiled" ERROR = "ERROR", "error" - plugin: models.ForeignKey = models.ForeignKey("Plugin", on_delete=models.CASCADE) - filename: models.CharField = models.CharField(max_length=200, blank=False) + plugin = models.ForeignKey("Plugin", on_delete=models.CASCADE) + filename = models.CharField(max_length=200, blank=False) # "source" can be null if we're only using this model to cache transpiled code from a ".zip" - source: models.TextField = models.TextField(blank=True, null=True) - status: models.CharField = models.CharField(max_length=20, choices=Status.choices, null=True) - transpiled: models.TextField = models.TextField(blank=True, null=True) - error: models.TextField = models.TextField(blank=True, null=True) - updated_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) + source = models.TextField(blank=True, null=True) + status = models.CharField(max_length=20, choices=Status.choices, null=True) + transpiled = models.TextField(blank=True, null=True) + error = models.TextField(blank=True, null=True) + updated_at = models.DateTimeField(null=True, blank=True) objects: PluginSourceFileManager = PluginSourceFileManager() diff --git a/posthog/models/project.py b/posthog/models/project.py index 900c6fbed32cf..5bf82245db590 100644 --- a/posthog/models/project.py +++ b/posthog/models/project.py @@ -28,19 +28,19 @@ class Project(models.Model): `Project` is part of the environments feature, which is a work in progress.
""" - id: models.BigIntegerField = models.BigIntegerField(primary_key=True, verbose_name="ID") - organization: models.ForeignKey = models.ForeignKey( + id = models.BigIntegerField(primary_key=True, verbose_name="ID") + organization = models.ForeignKey( "posthog.Organization", on_delete=models.CASCADE, related_name="projects", related_query_name="project", ) - name: models.CharField = models.CharField( + name = models.CharField( max_length=200, default="Default project", validators=[MinLengthValidator(1, "Project must have a name!")], ) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + created_at = models.DateTimeField(auto_now_add=True) objects: ProjectManager = ProjectManager() diff --git a/posthog/models/property/property.py b/posthog/models/property/property.py index 5f62ef4f78cb1..59fbec3a78626 100644 --- a/posthog/models/property/property.py +++ b/posthog/models/property/property.py @@ -35,6 +35,7 @@ class BehavioralPropertyType(StrEnum): "precalculated-cohort", "group", "recording", + "log_entry", "behavioral", "session", "hogql", @@ -91,6 +92,7 @@ class BehavioralPropertyType(StrEnum): "precalculated-cohort": ["key", "value"], "group": ["key", "value", "group_type_index"], "recording": ["key", "value"], + "log_entry": ["key", "value"], "behavioral": ["key", "value"], "session": ["key", "value"], "hogql": ["key"], diff --git a/posthog/models/property_definition.py b/posthog/models/property_definition.py index 99196d1ff78e6..e2e77eba2148c 100644 --- a/posthog/models/property_definition.py +++ b/posthog/models/property_definition.py @@ -37,23 +37,23 @@ class Type(models.IntegerChoices): GROUP = 3, "group" SESSION = 4, "session" - team: models.ForeignKey = models.ForeignKey( + team = models.ForeignKey( Team, on_delete=models.CASCADE, related_name="property_definitions", related_query_name="team", ) - name: models.CharField = models.CharField(max_length=400) - is_numerical: models.BooleanField = models.BooleanField( + name = models.CharField(max_length=400) + is_numerical = models.BooleanField( default=False ) # whether the property can be interpreted as a number, and therefore used for math aggregation operations property_type = models.CharField(max_length=50, choices=PropertyType.choices, blank=True, null=True) # :TRICKY: May be null for historical events - type: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(default=Type.EVENT, choices=Type.choices) + type = models.PositiveSmallIntegerField(default=Type.EVENT, choices=Type.choices) # Only populated for `Type.GROUP` - group_type_index: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(null=True) + group_type_index = models.PositiveSmallIntegerField(null=True) # DEPRECATED property_type_format = models.CharField( @@ -61,11 +61,11 @@ class Type(models.IntegerChoices): ) # Deprecated in #8292 # DEPRECATED - volume_30_day: models.IntegerField = models.IntegerField(default=None, null=True) # Deprecated in #4480 + volume_30_day = models.IntegerField(default=None, null=True) # Deprecated in #4480 # DEPRECATED # Number of times an insight has been saved with this property in its filter in the last 30 rolling days (computed asynchronously when stars align) - query_usage_30_day: models.IntegerField = models.IntegerField(default=None, null=True) + query_usage_30_day = models.IntegerField(default=None, null=True) class Meta: indexes = [ diff --git a/posthog/models/proxy_record.py b/posthog/models/proxy_record.py index 3c421f4a0bb7c..c69f9d96d196e 100644 --- 
a/posthog/models/proxy_record.py +++ b/posthog/models/proxy_record.py @@ -4,12 +4,10 @@ class ProxyRecord(UUIDModel): - organization: models.ForeignKey = models.ForeignKey( - Organization, on_delete=models.CASCADE, related_name="proxy_records" - ) - domain: models.CharField = models.CharField(max_length=64, unique=True) - target_cname: models.CharField = models.CharField(max_length=256, null=False) - message: models.CharField = models.CharField(max_length=1024, null=True) + organization = models.ForeignKey(Organization, on_delete=models.CASCADE, related_name="proxy_records") + domain = models.CharField(max_length=64, unique=True) + target_cname = models.CharField(max_length=256, null=False) + message = models.CharField(max_length=1024, null=True) class Status(models.TextChoices): WAITING = "waiting" @@ -19,15 +17,15 @@ class Status(models.TextChoices): DELETING = "deleting" TIMED_OUT = "timed_out" - status: models.CharField = models.CharField( + status = models.CharField( choices=Status.choices, default=Status.WAITING, ) - created_by: models.ForeignKey = models.ForeignKey( + created_by = models.ForeignKey( "posthog.User", on_delete=models.SET_NULL, null=True, ) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) diff --git a/posthog/models/raw_sessions/migrations.py b/posthog/models/raw_sessions/migrations.py index ffc998c3a3d89..c7a2fa9758020 100644 --- a/posthog/models/raw_sessions/migrations.py +++ b/posthog/models/raw_sessions/migrations.py @@ -7,6 +7,7 @@ ADD COLUMN IF NOT EXISTS page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)) +AFTER maybe_has_session_replay """ BASE_RAW_SESSIONS_ADD_PAGEVIEW_AUTOCAPTURE_SCREEN_UP_TO_2_COLUMN_SQL = ( diff --git a/posthog/models/raw_sessions/sql.py b/posthog/models/raw_sessions/sql.py index f2f2c03cfd1b7..37e16e3062363 100644 --- a/posthog/models/raw_sessions/sql.py +++ b/posthog/models/raw_sessions/sql.py @@ -94,11 +94,12 @@ autocapture_uniq AggregateFunction(uniq, Nullable(UUID)), screen_count SimpleAggregateFunction(sum, Int64), screen_uniq AggregateFunction(uniq, Nullable(UUID)), - -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 of these - page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)), -- replay - maybe_has_session_replay SimpleAggregateFunction(max, Bool) -- will be written False to by the events table mv and True to by the replay table mv + maybe_has_session_replay SimpleAggregateFunction(max, Bool), -- will be written False to by the events table mv and True to by the replay table mv + + -- as a performance optimisation, also keep track of the uniq events for all of these combined, a bounce is a session with <2 of these + page_screen_autocapture_uniq_up_to AggregateFunction(uniqUpTo(1), Nullable(UUID)) ) ENGINE = {engine} """ @@ -214,10 +215,12 @@ def source_int_column(column_name: str) -> str: initializeAggregation('uniqState', if(event='autocapture', uuid, NULL)) as autocapture_uniq, if(event='$screen', 1, 0) as screen_count, initializeAggregation('uniqState', if(event='screen', uuid, NULL)) as screen_uniq, - initializeAggregation('uniqUpToState(1)', if(event='$pageview' OR event='$screen' OR event='$autocapture', uuid, NULL)) as page_screen_autocapture_uniq_up_to, -- replay - false as 
maybe_has_session_replay + false as maybe_has_session_replay, + + -- perf + initializeAggregation('uniqUpToState(1)', if(event='$pageview' OR event='$screen' OR event='$autocapture', uuid, NULL)) as page_screen_autocapture_uniq_up_to FROM {database}.events WHERE bitAnd(bitShiftRight(toUInt128(accurateCastOrNull(`$session_id`, 'UUID')), 76), 0xF) == 7 -- has a session id and is valid uuidv7 """.format( @@ -321,10 +324,12 @@ def source_int_column(column_name: str) -> str: uniqState(if(event='$autocapture', uuid, NULL)) as autocapture_uniq, sumIf(1, event='$screen') as screen_count, uniqState(if(event='$screen', uuid, NULL)) as screen_uniq, - uniqUpToState(1)(if(event='$pageview' OR event='$screen' OR event='$autocapture', uuid, NULL)) as page_screen_autocapture_uniq_up_to, -- replay - false as maybe_has_session_replay + false as maybe_has_session_replay, + + -- perf + uniqUpToState(1)(if(event='$pageview' OR event='$screen' OR event='$autocapture', uuid, NULL)) as page_screen_autocapture_uniq_up_to FROM {database}.sharded_events WHERE bitAnd(bitShiftRight(toUInt128(accurateCastOrNull(`$session_id`, 'UUID')), 76), 0xF) == 7 -- has a session id and is valid uuidv7) GROUP BY @@ -429,7 +434,7 @@ def source_int_column(column_name: str) -> str: session_id_v7, fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(session_id_v7, 80)), 1000)) as session_timestamp, team_id, - any(distinct_id) as distinct_id, + argMaxMerge(distinct_id) as distinct_id, min(min_timestamp) as min_timestamp, max(max_timestamp) as max_timestamp, @@ -483,7 +488,9 @@ def source_int_column(column_name: str) -> str: sum(screen_count) as screen_count, uniqMerge(screen_uniq) as screen_uniq, - max(maybe_has_session_replay) as maybe_has_session_replay + max(maybe_has_session_replay) as maybe_has_session_replay, + + uniqUpToMerge(1)(page_screen_autocapture_uniq_up_to) as page_screen_autocapture_uniq_up_to FROM {TABLE_BASE_NAME} GROUP BY session_id_v7, team_id """ diff --git a/posthog/models/scheduled_change.py b/posthog/models/scheduled_change.py index ee92cc59c506e..9b1d96c08cacf 100644 --- a/posthog/models/scheduled_change.py +++ b/posthog/models/scheduled_change.py @@ -6,17 +6,17 @@ class AllowedModels(models.TextChoices): FEATURE_FLAG = "FeatureFlag", "feature flag" id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID") - record_id: models.CharField = models.CharField(max_length=200) - model_name: models.CharField = models.CharField(max_length=100, choices=AllowedModels.choices) - payload: models.JSONField = models.JSONField(default=dict) - scheduled_at: models.DateTimeField = models.DateTimeField() - executed_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) + record_id = models.CharField(max_length=200) + model_name = models.CharField(max_length=100, choices=AllowedModels.choices) + payload = models.JSONField(default=dict) + scheduled_at = models.DateTimeField() + executed_at = models.DateTimeField(null=True, blank=True) failure_reason = models.CharField(max_length=400, null=True, blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + created_at = models.DateTimeField(auto_now_add=True) + created_by = 
models.ForeignKey("User", on_delete=models.SET_NULL, null=True) + updated_at = models.DateTimeField(auto_now_add=True) class Meta: indexes = [ diff --git a/posthog/models/sharing_configuration.py b/posthog/models/sharing_configuration.py index 7bbacc453559d..84cbb400ce700 100644 --- a/posthog/models/sharing_configuration.py +++ b/posthog/models/sharing_configuration.py @@ -12,10 +12,10 @@ def get_default_access_token() -> str: class SharingConfiguration(models.Model): # Relations - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) + team = models.ForeignKey("Team", on_delete=models.CASCADE) dashboard = models.ForeignKey("posthog.Dashboard", on_delete=models.CASCADE, null=True) insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE, null=True) - recording: models.ForeignKey = models.ForeignKey( + recording = models.ForeignKey( "SessionRecording", related_name="sharing_configurations", on_delete=models.CASCADE, @@ -24,10 +24,10 @@ class SharingConfiguration(models.Model): blank=True, ) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True, blank=True) - enabled: models.BooleanField = models.BooleanField(default=False) - access_token: models.CharField = models.CharField( + enabled = models.BooleanField(default=False) + access_token = models.CharField( max_length=400, null=True, blank=True, diff --git a/posthog/models/subscription.py b/posthog/models/subscription.py index 6bd742015f252..ef0d25bd9d070 100644 --- a/posthog/models/subscription.py +++ b/posthog/models/subscription.py @@ -74,36 +74,36 @@ class SubscriptionByWeekDay(models.TextChoices): SUNDAY = "sunday" # Relations - i.e. WHAT are we exporting? - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) + team = models.ForeignKey("Team", on_delete=models.CASCADE) dashboard = models.ForeignKey("posthog.Dashboard", on_delete=models.CASCADE, null=True) insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE, null=True) # Subscription type (email, slack etc.) 
- title: models.CharField = models.CharField(max_length=100, null=True, blank=True) - target_type: models.CharField = models.CharField(max_length=10, choices=SubscriptionTarget.choices) - target_value: models.TextField = models.TextField() + title = models.CharField(max_length=100, null=True, blank=True) + target_type = models.CharField(max_length=10, choices=SubscriptionTarget.choices) + target_value = models.TextField() # Subscription delivery (related to rrule) - frequency: models.CharField = models.CharField(max_length=10, choices=SubscriptionFrequency.choices) - interval: models.IntegerField = models.IntegerField(default=1) - count: models.IntegerField = models.IntegerField(null=True) + frequency = models.CharField(max_length=10, choices=SubscriptionFrequency.choices) + interval = models.IntegerField(default=1) + count = models.IntegerField(null=True) byweekday: ArrayField = ArrayField( models.CharField(max_length=10, choices=SubscriptionByWeekDay.choices), null=True, blank=True, default=None, ) - bysetpos: models.IntegerField = models.IntegerField(null=True) - start_date: models.DateTimeField = models.DateTimeField() - until_date: models.DateTimeField = models.DateTimeField(null=True, blank=True) + bysetpos = models.IntegerField(null=True) + start_date = models.DateTimeField() + until_date = models.DateTimeField(null=True, blank=True) # Controlled field - next schedule as helper for - next_delivery_date: models.DateTimeField = models.DateTimeField(null=True, blank=True) + next_delivery_date = models.DateTimeField(null=True, blank=True) # Meta - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) - deleted: models.BooleanField = models.BooleanField(default=False) + created_at = models.DateTimeField(auto_now_add=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + deleted = models.BooleanField(default=False) def save(self, *args, **kwargs) -> None: # Only if the schedule has changed do we update the next delivery date diff --git a/posthog/models/tag.py b/posthog/models/tag.py index fbe3e262e3c3e..55455c49fb542 100644 --- a/posthog/models/tag.py +++ b/posthog/models/tag.py @@ -8,8 +8,8 @@ def tagify(tag: str): class Tag(UUIDModel): - name: models.CharField = models.CharField(max_length=255) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) + name = models.CharField(max_length=255) + team = models.ForeignKey("Team", on_delete=models.CASCADE) class Meta: unique_together = ("name", "team") diff --git a/posthog/models/tagged_item.py b/posthog/models/tagged_item.py index 302adcdb24f23..7fcc1f7bc262a 100644 --- a/posthog/models/tagged_item.py +++ b/posthog/models/tagged_item.py @@ -55,46 +55,46 @@ class TaggedItem(UUIDModel): https://docs.djangoproject.com/en/4.0/ref/contrib/contenttypes/#generic-relations """ - tag: models.ForeignKey = models.ForeignKey("Tag", on_delete=models.CASCADE, related_name="tagged_items") + tag = models.ForeignKey("Tag", on_delete=models.CASCADE, related_name="tagged_items") # When adding a new taggeditem-model relationship, make sure to add the foreign key field and append field name to # the `RELATED_OBJECTS` tuple above. 
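Stepping back to the raw_sessions changes earlier in this patch: the `page_screen_autocapture_uniq_up_to` column moves to the end of the table definition (and the `ALTER TABLE` in migrations.py now adds it `AFTER maybe_has_session_replay` to match), the two writer queries are reordered the same way, the reader view additionally merges the state with `uniqUpToMerge(1)(...)`, and `distinct_id` switches from `any(...)` to `argMaxMerge(...)`, presumably so the merged row reports the latest distinct_id rather than an arbitrary one. The column is the bounce-rate optimisation called out in the SQL comment: `uniqUpTo(1)` counts distinct pageview/screen/autocapture event UUIDs exactly up to 1 and saturates at 2, so a value below 2 marks a bounced session. A hypothetical reader query under those assumptions (table name and usage are illustrative, not part of this patch):

# uniqUpTo(1) yields 0, 1, or 2 (meaning "2 or more"), so "< 2" identifies bounces cheaply.
BOUNCE_RATE_SQL = """
SELECT avg(is_bounce) AS bounce_rate
FROM (
    SELECT
        session_id_v7,
        uniqUpToMerge(1)(page_screen_autocapture_uniq_up_to) < 2 AS is_bounce
    FROM raw_sessions
    GROUP BY session_id_v7
)
"""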
- dashboard: models.ForeignKey = models.ForeignKey( + dashboard = models.ForeignKey( "Dashboard", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items", ) - insight: models.ForeignKey = models.ForeignKey( + insight = models.ForeignKey( "Insight", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items", ) - event_definition: models.ForeignKey = models.ForeignKey( + event_definition = models.ForeignKey( "EventDefinition", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items", ) - property_definition: models.ForeignKey = models.ForeignKey( + property_definition = models.ForeignKey( "PropertyDefinition", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items", ) - action: models.ForeignKey = models.ForeignKey( + action = models.ForeignKey( "Action", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items", ) - feature_flag: models.ForeignKey = models.ForeignKey( + feature_flag = models.ForeignKey( "FeatureFlag", on_delete=models.CASCADE, null=True, diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 57928b39444a1..bcb0b236c75d4 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -178,44 +178,44 @@ class Meta: ) ] - organization: models.ForeignKey = models.ForeignKey( + organization = models.ForeignKey( "posthog.Organization", on_delete=models.CASCADE, related_name="teams", related_query_name="team", ) - project: models.ForeignKey = models.ForeignKey( + project = models.ForeignKey( "posthog.Project", on_delete=models.CASCADE, related_name="teams", related_query_name="team", null=True, ) - api_token: models.CharField = models.CharField( + api_token = models.CharField( max_length=200, unique=True, default=generate_random_token_project, validators=[MinLengthValidator(10, "Project's API token must be at least 10 characters long!")], ) app_urls: ArrayField = ArrayField(models.CharField(max_length=200, null=True), default=list, blank=True) - name: models.CharField = models.CharField( + name = models.CharField( max_length=200, default="Default project", validators=[MinLengthValidator(1, "Project must have a name!")], ) - slack_incoming_webhook: models.CharField = models.CharField(max_length=500, null=True, blank=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) - updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - anonymize_ips: models.BooleanField = models.BooleanField(default=False) - completed_snippet_onboarding: models.BooleanField = models.BooleanField(default=False) - has_completed_onboarding_for: models.JSONField = models.JSONField(null=True, blank=True) - ingested_event: models.BooleanField = models.BooleanField(default=False) - autocapture_opt_out: models.BooleanField = models.BooleanField(null=True, blank=True) - autocapture_web_vitals_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) - autocapture_exceptions_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) - autocapture_exceptions_errors_to_ignore: models.JSONField = models.JSONField(null=True, blank=True) - session_recording_opt_in: models.BooleanField = models.BooleanField(default=False) - session_recording_sample_rate: models.DecimalField = models.DecimalField( + slack_incoming_webhook = models.CharField(max_length=500, null=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + anonymize_ips = 
models.BooleanField(default=False) + completed_snippet_onboarding = models.BooleanField(default=False) + has_completed_onboarding_for = models.JSONField(null=True, blank=True) + ingested_event = models.BooleanField(default=False) + autocapture_opt_out = models.BooleanField(null=True, blank=True) + autocapture_web_vitals_opt_in = models.BooleanField(null=True, blank=True) + autocapture_exceptions_opt_in = models.BooleanField(null=True, blank=True) + autocapture_exceptions_errors_to_ignore = models.JSONField(null=True, blank=True) + session_recording_opt_in = models.BooleanField(default=False) + session_recording_sample_rate = models.DecimalField( # will store a decimal between 0 and 1 allowing up to 2 decimal places null=True, blank=True, @@ -223,39 +223,37 @@ class Meta: decimal_places=2, validators=[MinValueValidator(Decimal(0)), MaxValueValidator(Decimal(1))], ) - session_recording_minimum_duration_milliseconds: models.IntegerField = models.IntegerField( + session_recording_minimum_duration_milliseconds = models.IntegerField( null=True, blank=True, validators=[MinValueValidator(0), MaxValueValidator(15000)], ) - session_recording_linked_flag: models.JSONField = models.JSONField(null=True, blank=True) - session_recording_network_payload_capture_config: models.JSONField = models.JSONField(null=True, blank=True) - session_replay_config: models.JSONField = models.JSONField(null=True, blank=True) - capture_console_log_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) - capture_performance_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) - surveys_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) - heatmaps_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) - session_recording_version: models.CharField = models.CharField(null=True, blank=True, max_length=24) - signup_token: models.CharField = models.CharField(max_length=200, null=True, blank=True) - is_demo: models.BooleanField = models.BooleanField(default=False) - access_control: models.BooleanField = models.BooleanField(default=False) - week_start_day: models.SmallIntegerField = models.SmallIntegerField( - null=True, blank=True, choices=WeekStartDay.choices - ) + session_recording_linked_flag = models.JSONField(null=True, blank=True) + session_recording_network_payload_capture_config = models.JSONField(null=True, blank=True) + session_replay_config = models.JSONField(null=True, blank=True) + capture_console_log_opt_in = models.BooleanField(null=True, blank=True) + capture_performance_opt_in = models.BooleanField(null=True, blank=True) + surveys_opt_in = models.BooleanField(null=True, blank=True) + heatmaps_opt_in = models.BooleanField(null=True, blank=True) + session_recording_version = models.CharField(null=True, blank=True, max_length=24) + signup_token = models.CharField(max_length=200, null=True, blank=True) + is_demo = models.BooleanField(default=False) + access_control = models.BooleanField(default=False) + week_start_day = models.SmallIntegerField(null=True, blank=True, choices=WeekStartDay.choices) # This is not a manual setting. It's updated automatically to reflect if the team uses site apps or not. 
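One detail worth noting in the Team fields above: `session_recording_sample_rate` keeps its constraints while losing the annotation, i.e. a nullable two-decimal-place decimal bounded to [0, 1] by `MinValueValidator`/`MaxValueValidator`. A small sketch of what those validators enforce when the model is cleaned (the standalone helper below is for illustration only; the field itself is unchanged by this patch beyond the annotation):

from decimal import Decimal
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator

# the same validator pair attached to Team.session_recording_sample_rate
sample_rate_validators = [MinValueValidator(Decimal(0)), MaxValueValidator(Decimal(1))]

def validate_sample_rate(value: Decimal) -> None:
    # mirrors the check Team.full_clean() runs for this field
    for validator in sample_rate_validators:
        validator(value)

validate_sample_rate(Decimal("0.25"))  # accepted
try:
    validate_sample_rate(Decimal("1.5"))
except ValidationError:
    pass  # rejected: the sample rate must stay between 0 and 1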
- inject_web_apps: models.BooleanField = models.BooleanField(null=True) + inject_web_apps = models.BooleanField(null=True) - test_account_filters: models.JSONField = models.JSONField(default=list) - test_account_filters_default_checked: models.BooleanField = models.BooleanField(null=True, blank=True) + test_account_filters = models.JSONField(default=list) + test_account_filters_default_checked = models.BooleanField(null=True, blank=True) - path_cleaning_filters: models.JSONField = models.JSONField(default=list, null=True, blank=True) - timezone: models.CharField = models.CharField(max_length=240, choices=TIMEZONES, default="UTC") - data_attributes: models.JSONField = models.JSONField(default=get_default_data_attributes) + path_cleaning_filters = models.JSONField(default=list, null=True, blank=True) + timezone = models.CharField(max_length=240, choices=TIMEZONES, default="UTC") + data_attributes = models.JSONField(default=get_default_data_attributes) person_display_name_properties: ArrayField = ArrayField(models.CharField(max_length=400), null=True, blank=True) live_events_columns: ArrayField = ArrayField(models.TextField(), null=True, blank=True) recording_domains: ArrayField = ArrayField(models.CharField(max_length=200, null=True), blank=True, null=True) - primary_dashboard: models.ForeignKey = models.ForeignKey( + primary_dashboard = models.ForeignKey( "posthog.Dashboard", on_delete=models.SET_NULL, null=True, @@ -266,10 +264,10 @@ class Meta: # Generic field for storing any team-specific context that is more temporary in nature and thus # likely doesn't deserve a dedicated column. Can be used for things like settings and overrides # during feature releases. - extra_settings: models.JSONField = models.JSONField(null=True, blank=True) + extra_settings = models.JSONField(null=True, blank=True) # Project level default HogQL query modifiers - modifiers: models.JSONField = models.JSONField(null=True, blank=True) + modifiers = models.JSONField(null=True, blank=True) # This is meant to be used as a stopgap until https://github.com/PostHog/meta/pull/39 gets implemented # Switches _most_ queries to using distinct_id as aggregator instead of person_id @@ -285,22 +283,20 @@ def aggregate_users_by_distinct_id(self) -> bool: correlation_config = models.JSONField(default=dict, null=True, blank=True) # DEPRECATED, DISUSED: recordings on CH are cleared with Clickhouse's TTL - session_recording_retention_period_days: models.IntegerField = models.IntegerField( - null=True, default=None, blank=True - ) + session_recording_retention_period_days = models.IntegerField(null=True, default=None, blank=True) # DEPRECATED, DISUSED: plugins are enabled for everyone now - plugins_opt_in: models.BooleanField = models.BooleanField(default=False) + plugins_opt_in = models.BooleanField(default=False) # DEPRECATED, DISUSED: replaced with env variable OPT_OUT_CAPTURE and User.anonymized_data - opt_out_capture: models.BooleanField = models.BooleanField(default=False) + opt_out_capture = models.BooleanField(default=False) # DEPRECATED: in favor of `EventDefinition` model - event_names: models.JSONField = models.JSONField(default=list, blank=True) - event_names_with_usage: models.JSONField = models.JSONField(default=list, blank=True) + event_names = models.JSONField(default=list, blank=True) + event_names_with_usage = models.JSONField(default=list, blank=True) # DEPRECATED: in favor of `PropertyDefinition` model - event_properties: models.JSONField = models.JSONField(default=list, blank=True) - event_properties_with_usage: 
models.JSONField = models.JSONField(default=list, blank=True) - event_properties_numerical: models.JSONField = models.JSONField(default=list, blank=True) - external_data_workspace_id: models.CharField = models.CharField(max_length=400, null=True, blank=True) - external_data_workspace_last_synced_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) + event_properties = models.JSONField(default=list, blank=True) + event_properties_with_usage = models.JSONField(default=list, blank=True) + event_properties_numerical = models.JSONField(default=list, blank=True) + external_data_workspace_id = models.CharField(max_length=400, null=True, blank=True) + external_data_workspace_last_synced_at = models.DateTimeField(null=True, blank=True) objects: TeamManager = TeamManager() diff --git a/posthog/models/test/test_hog_function.py b/posthog/models/test/test_hog_function.py index 02bc9fb62e3c6..95d53f916a6c9 100644 --- a/posthog/models/test/test_hog_function.py +++ b/posthog/models/test/test_hog_function.py @@ -2,6 +2,7 @@ from django.test import TestCase from inline_snapshot import snapshot +from hogvm.python.operation import HOGQL_BYTECODE_VERSION from posthog.models.action.action import Action from posthog.models.hog_functions.hog_function import HogFunction from posthog.models.user import User @@ -30,7 +31,7 @@ def test_hog_function_team_no_filters_compilation(self): # Some json serialization is needed to compare the bytecode more easily in tests json_filters = to_dict(item.filters) - assert json_filters["bytecode"] == ["_h", 29] # TRUE + assert json_filters["bytecode"] == ["_H", HOGQL_BYTECODE_VERSION, 29] # TRUE def test_hog_function_filters_compilation(self): item = HogFunction.objects.create( @@ -45,20 +46,13 @@ def test_hog_function_filters_compilation(self): # Some json serialization is needed to compare the bytecode more easily in tests json_filters = to_dict(item.filters) - assert json_filters == { "events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}], "actions": [{"id": "9", "name": "Test Action", "type": "actions", "order": 1}], "filter_test_accounts": True, "bytecode": [ - "_h", - 33, - 2, - 33, - 1, - 11, - 32, - "^(localhost|127\\.0\\.0\\.1)($|:)", + "_H", + HOGQL_BYTECODE_VERSION, 32, "$host", 32, @@ -68,6 +62,8 @@ def test_hog_function_filters_compilation(self): 2, "toString", 1, + 32, + "^(localhost|127\\.0\\.0\\.1)($|:)", 2, "match", 2, @@ -77,8 +73,6 @@ def test_hog_function_filters_compilation(self): 35, 33, 1, - 3, - 2, 32, "$pageview", 32, @@ -86,8 +80,8 @@ def test_hog_function_filters_compilation(self): 1, 1, 11, - 32, - "^(localhost|127\\.0\\.0\\.1)($|:)", + 3, + 2, 32, "$host", 32, @@ -97,6 +91,8 @@ def test_hog_function_filters_compilation(self): 2, "toString", 1, + 32, + "^(localhost|127\\.0\\.0\\.1)($|:)", 2, "match", 2, @@ -106,6 +102,11 @@ def test_hog_function_filters_compilation(self): 35, 33, 1, + 33, + 2, + 33, + 1, + 11, 3, 2, 4, @@ -126,7 +127,7 @@ def test_hog_function_team_filters_only_compilation(self): json_filters = to_dict(item.filters) assert json.dumps(json_filters["bytecode"]) == snapshot( - '["_h", 32, "^(localhost|127\\\\.0\\\\.0\\\\.1)($|:)", 32, "$host", 32, "properties", 1, 2, 2, "toString", 1, 2, "match", 2, 5, 47, 3, 35, 33, 1, 3, 1]' + f'["_H", {HOGQL_BYTECODE_VERSION}, 32, "$host", 32, "properties", 1, 2, 2, "toString", 1, 32, "^(localhost|127\\\\.0\\\\.0\\\\.1)($|:)", 2, "match", 2, 5, 47, 3, 35, 33, 1, 3, 1]' ) @@ -197,11 +198,11 @@ def test_hog_functions_reload_on_action_saved(self): # Check that 
the bytecode is correct assert json.dumps(hog_function_1.filters["bytecode"]) == snapshot( - '["_h", 32, "old-value-2", 32, "prop-2", 32, "properties", 1, 2, 11, 3, 1, 32, "old-value-1", 32, "prop-1", 32, "properties", 1, 2, 11, 32, "test-event", 32, "event", 1, 1, 11, 3, 2, 3, 1, 4, 2]' + f'["_H", {HOGQL_BYTECODE_VERSION}, 32, "test-event", 32, "event", 1, 1, 11, 32, "old-value-1", 32, "prop-1", 32, "properties", 1, 2, 11, 3, 2, 3, 1, 32, "old-value-2", 32, "prop-2", 32, "properties", 1, 2, 11, 3, 1, 4, 2]' ) assert json.dumps(hog_function_2.filters["bytecode"]) == snapshot( - '["_h", 32, "old-value-1", 32, "prop-1", 32, "properties", 1, 2, 11, 32, "test-event", 32, "event", 1, 1, 11, 3, 2, 3, 1, 4, 1]' + f'["_H", {HOGQL_BYTECODE_VERSION}, 32, "test-event", 32, "event", 1, 1, 11, 32, "old-value-1", 32, "prop-1", 32, "properties", 1, 2, 11, 3, 2, 3, 1, 4, 1]' ) # Modify the action and check that the bytecode is updated @@ -225,10 +226,10 @@ def test_hog_functions_reload_on_action_saved(self): hog_function_2.refresh_from_db() assert json.dumps(hog_function_1.filters["bytecode"]) == snapshot( - '["_h", 32, "old-value-2", 32, "prop-2", 32, "properties", 1, 2, 11, 3, 1, 32, "change-value", 32, "prop-1", 32, "properties", 1, 2, 11, 32, "test-event", 32, "event", 1, 1, 11, 3, 2, 3, 1, 4, 2]' + f'["_H", {HOGQL_BYTECODE_VERSION}, 32, "test-event", 32, "event", 1, 1, 11, 32, "change-value", 32, "prop-1", 32, "properties", 1, 2, 11, 3, 2, 3, 1, 32, "old-value-2", 32, "prop-2", 32, "properties", 1, 2, 11, 3, 1, 4, 2]' ) assert json.dumps(hog_function_2.filters["bytecode"]) == snapshot( - '["_h", 32, "change-value", 32, "prop-1", 32, "properties", 1, 2, 11, 32, "test-event", 32, "event", 1, 1, 11, 3, 2, 3, 1, 4, 1]' + f'["_H", {HOGQL_BYTECODE_VERSION}, 32, "test-event", 32, "event", 1, 1, 11, 32, "change-value", 32, "prop-1", 32, "properties", 1, 2, 11, 3, 2, 3, 1, 4, 1]' ) def test_hog_functions_reload_on_team_saved(self): @@ -258,11 +259,11 @@ def test_hog_functions_reload_on_team_saved(self): ) # Check that the bytecode is correct - assert json.dumps(hog_function_1.filters["bytecode"]) == snapshot('["_h", 29]') + assert json.dumps(hog_function_1.filters["bytecode"]) == snapshot(f'["_H", {HOGQL_BYTECODE_VERSION}, 29]') assert json.dumps(hog_function_2.filters["bytecode"]) == snapshot( - '["_h", 32, "$pageview", 32, "event", 1, 1, 11, 3, 1, 4, 1]' + f'["_H", {HOGQL_BYTECODE_VERSION}, 32, "$pageview", 32, "event", 1, 1, 11, 3, 1, 4, 1]' ) - assert json.dumps(hog_function_3.filters["bytecode"]) == snapshot('["_h", 29]') + assert json.dumps(hog_function_3.filters["bytecode"]) == snapshot(f'["_H", {HOGQL_BYTECODE_VERSION}, 29]') # Modify the action and check that the bytecode is updated self.team.test_account_filters = [ @@ -277,9 +278,9 @@ def test_hog_functions_reload_on_team_saved(self): hog_function_3.refresh_from_db() assert json.dumps(hog_function_1.filters["bytecode"]) == snapshot( - '["_h", 32, "test", 32, "$pageview", 32, "properties", 1, 2, 2, "toString", 1, 2, "match", 2, 47, 3, 35, 33, 0, 32, "^(localhost|127\\\\.0\\\\.0\\\\.1)($|:)", 32, "$host", 32, "properties", 1, 2, 2, "toString", 1, 2, "match", 2, 47, 3, 35, 33, 0, 3, 2]' + f'["_H", {HOGQL_BYTECODE_VERSION}, 32, "$host", 32, "properties", 1, 2, 2, "toString", 1, 32, "^(localhost|127\\\\.0\\\\.0\\\\.1)($|:)", 2, "match", 2, 47, 3, 35, 33, 0, 32, "$pageview", 32, "properties", 1, 2, 2, "toString", 1, 32, "test", 2, "match", 2, 47, 3, 35, 33, 0, 3, 2]' ) assert json.dumps(hog_function_2.filters["bytecode"]) == snapshot( - '["_h", 32, 
"$pageview", 32, "event", 1, 1, 11, 32, "test", 32, "$pageview", 32, "properties", 1, 2, 2, "toString", 1, 2, "match", 2, 47, 3, 35, 33, 0, 32, "^(localhost|127\\\\.0\\\\.0\\\\.1)($|:)", 32, "$host", 32, "properties", 1, 2, 2, "toString", 1, 2, "match", 2, 47, 3, 35, 33, 0, 3, 3, 4, 1]' + f'["_H", {HOGQL_BYTECODE_VERSION}, 32, "$host", 32, "properties", 1, 2, 2, "toString", 1, 32, "^(localhost|127\\\\.0\\\\.0\\\\.1)($|:)", 2, "match", 2, 47, 3, 35, 33, 0, 32, "$pageview", 32, "properties", 1, 2, 2, "toString", 1, 32, "test", 2, "match", 2, 47, 3, 35, 33, 0, 32, "$pageview", 32, "event", 1, 1, 11, 3, 3, 4, 1]' ) - assert json.dumps(hog_function_3.filters["bytecode"]) == snapshot('["_h", 29]') + assert json.dumps(hog_function_3.filters["bytecode"]) == snapshot(f'["_H", {HOGQL_BYTECODE_VERSION}, 29]') diff --git a/posthog/models/uploaded_media.py b/posthog/models/uploaded_media.py index 2b31f348263cb..8810d13616ddc 100644 --- a/posthog/models/uploaded_media.py +++ b/posthog/models/uploaded_media.py @@ -20,15 +20,15 @@ class ObjectStorageUnavailable(Exception): class UploadedMedia(UUIDModel): - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + created_at = models.DateTimeField(auto_now_add=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) # path in object storage or some other location identifier for the asset # 1000 characters would hold a 20 UUID forward slash separated path with space to spare - media_location: models.TextField = models.TextField(null=True, blank=True, max_length=1000) - content_type: models.TextField = models.TextField(null=True, blank=True, max_length=100) - file_name: models.TextField = models.TextField(null=True, blank=True, max_length=1000) + media_location = models.TextField(null=True, blank=True, max_length=1000) + content_type = models.TextField(null=True, blank=True, max_length=100) + file_name = models.TextField(null=True, blank=True, max_length=1000) def get_absolute_url(self) -> str: return absolute_uri(f"/uploaded_media/{self.id}") diff --git a/posthog/models/user.py b/posthog/models/user.py index f896ab7bb9a6b..621c1d36429a7 100644 --- a/posthog/models/user.py +++ b/posthog/models/user.py @@ -140,28 +140,26 @@ class User(AbstractUser, UUIDClassicModel): current_team = models.ForeignKey("posthog.Team", models.SET_NULL, null=True, related_name="teams_currently+") email = models.EmailField(_("email address"), unique=True) pending_email = models.EmailField(_("pending email address awaiting verification"), null=True, blank=True) - temporary_token: models.CharField = models.CharField(max_length=200, null=True, blank=True, unique=True) - distinct_id: models.CharField = models.CharField(max_length=200, null=True, blank=True, unique=True) - is_email_verified: models.BooleanField = models.BooleanField(null=True, blank=True) - requested_password_reset_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) - has_seen_product_intro_for: models.JSONField = models.JSONField(null=True, blank=True) - strapi_id: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(null=True, blank=True) + temporary_token = models.CharField(max_length=200, null=True, blank=True, unique=True) + distinct_id = 
models.CharField(max_length=200, null=True, blank=True, unique=True) + is_email_verified = models.BooleanField(null=True, blank=True) + requested_password_reset_at = models.DateTimeField(null=True, blank=True) + has_seen_product_intro_for = models.JSONField(null=True, blank=True) + strapi_id = models.PositiveSmallIntegerField(null=True, blank=True) # Preferences / configuration options - theme_mode: models.CharField = models.CharField(max_length=20, null=True, blank=True, choices=ThemeMode.choices) + theme_mode = models.CharField(max_length=20, null=True, blank=True, choices=ThemeMode.choices) # These override the notification settings - partial_notification_settings: models.JSONField = models.JSONField(null=True, blank=True) - anonymize_data: models.BooleanField = models.BooleanField(default=False, null=True, blank=True) - toolbar_mode: models.CharField = models.CharField( - max_length=200, null=True, blank=True, choices=TOOLBAR_CHOICES, default=TOOLBAR - ) - hedgehog_config: models.JSONField = models.JSONField(null=True, blank=True) + partial_notification_settings = models.JSONField(null=True, blank=True) + anonymize_data = models.BooleanField(default=False, null=True, blank=True) + toolbar_mode = models.CharField(max_length=200, null=True, blank=True, choices=TOOLBAR_CHOICES, default=TOOLBAR) + hedgehog_config = models.JSONField(null=True, blank=True) # DEPRECATED - events_column_config: models.JSONField = models.JSONField(default=events_column_config_default) + events_column_config = models.JSONField(default=events_column_config_default) # DEPRECATED - Most emails are done via 3rd parties and we use their opt/in out tooling - email_opt_in: models.BooleanField = models.BooleanField(default=False, null=True, blank=True) + email_opt_in = models.BooleanField(default=False, null=True, blank=True) # Remove unused attributes from `AbstractUser` username = None diff --git a/posthog/models/user_scene_personalisation.py b/posthog/models/user_scene_personalisation.py index 8b745f67a6808..fe19406ebe121 100644 --- a/posthog/models/user_scene_personalisation.py +++ b/posthog/models/user_scene_personalisation.py @@ -4,10 +4,10 @@ class UserScenePersonalisation(UUIDModel): - scene: models.CharField = models.CharField(max_length=200) - dashboard: models.ForeignKey = models.ForeignKey("Dashboard", on_delete=models.CASCADE, null=True, blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, null=True, blank=True) - user: models.ForeignKey = models.ForeignKey( + scene = models.CharField(max_length=200) + dashboard = models.ForeignKey("Dashboard", on_delete=models.CASCADE, null=True, blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE, null=True, blank=True) + user = models.ForeignKey( "User", on_delete=models.CASCADE, null=True, diff --git a/posthog/models/utils.py b/posthog/models/utils.py index 796c47ee3d163..e9498ce32990e 100644 --- a/posthog/models/utils.py +++ b/posthog/models/utils.py @@ -139,23 +139,23 @@ def uuid7(unix_ms_time: Optional[Union[int, str]] = None, random: Optional[Union class CreatedMetaFields(models.Model): - created_by: models.ForeignKey = models.ForeignKey("posthog.User", on_delete=models.SET_NULL, null=True, blank=True) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + created_by = models.ForeignKey("posthog.User", on_delete=models.SET_NULL, null=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True) class Meta: abstract = True class UpdatedMetaFields(models.Model): - updated_at: 
models.DateTimeField = models.DateTimeField(auto_now=True, null=True, blank=True) + updated_at = models.DateTimeField(auto_now=True, null=True, blank=True) class Meta: abstract = True class DeletedMetaFields(models.Model): - deleted: models.BooleanField = models.BooleanField(null=True, blank=True, default=False) - deleted_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) + deleted = models.BooleanField(null=True, blank=True, default=False) + deleted_at = models.DateTimeField(null=True, blank=True) class Meta: abstract = True @@ -164,20 +164,20 @@ class Meta: class UUIDModel(models.Model): """Base Django Model with default autoincremented ID field replaced with UUIDT.""" + id = models.UUIDField(primary_key=True, default=UUIDT, editable=False) + class Meta: abstract = True - id: models.UUIDField = models.UUIDField(primary_key=True, default=UUIDT, editable=False) - class UUIDClassicModel(models.Model): """Base Django Model with default autoincremented ID field kept and a UUIDT field added.""" + uuid = models.UUIDField(unique=True, default=UUIDT, editable=False) + class Meta: abstract = True - uuid: models.UUIDField = models.UUIDField(unique=True, default=UUIDT, editable=False) - def sane_repr(*attrs: str, include_id=True) -> Callable[[object], str]: if "id" not in attrs and "pk" not in attrs and include_id: diff --git a/posthog/schema.py b/posthog/schema.py index 1a920c6b76606..dd666bdc05b58 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -12,14 +12,6 @@ class SchemaRoot(RootModel[Any]): root: Any -class AbsoluteThreshold(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - lower: Optional[float] = None - upper: Optional[float] = None - - class MathGroupTypeIndex(float, Enum): NUMBER_0 = 0 NUMBER_1 = 1 @@ -36,11 +28,43 @@ class AggregationAxisFormat(StrEnum): PERCENTAGE_SCALED = "percentage_scaled" -class AnomalyCondition(BaseModel): +class AlertCheck(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + calculated_value: float + created_at: str + id: str + state: str + targets_notified: bool + + +class AlertCondition(BaseModel): + pass + model_config = ConfigDict( + extra="forbid", + ) + + +class AlertTypeBase(BaseModel): model_config = ConfigDict( extra="forbid", ) - absoluteThreshold: AbsoluteThreshold + condition: AlertCondition + enabled: bool + insight: float + name: str + + +class AlertTypeWrite(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + condition: AlertCondition + enabled: bool + insight: float + name: str + subscribed_users: list[int] class Kind(StrEnum): @@ -560,6 +584,19 @@ class GoalLine(BaseModel): value: float +class HedgehogColorOptions(StrEnum): + GREEN = "green" + RED = "red" + BLUE = "blue" + PURPLE = "purple" + DARK = "dark" + LIGHT = "light" + SEPIA = "sepia" + INVERT = "invert" + INVERT_HUE = "invert-hue" + GREYSCALE = "greyscale" + + class HogLanguage(StrEnum): HOG = "hog" HOG_JSON = "hogJson" @@ -581,6 +618,7 @@ class HogQLNotice(BaseModel): class BounceRatePageViewMode(StrEnum): COUNT_PAGEVIEWS = "count_pageviews" UNIQ_URLS = "uniq_urls" + UNIQ_PAGE_SCREEN_AUTOCAPTURES = "uniq_page_screen_autocaptures" class InCohortVia(StrEnum): @@ -618,6 +656,7 @@ class PersonsOnEventsMode(StrEnum): class PropertyGroupsMode(StrEnum): ENABLED = "enabled" DISABLED = "disabled" + OPTIMIZED = "optimized" class SessionTableVersion(StrEnum): @@ -700,6 +739,14 @@ class InsightNodeKind(StrEnum): LIFECYCLE_QUERY = "LifecycleQuery" +class InsightsThresholdAbsolute(BaseModel): + model_config = ConfigDict( + 
extra="forbid", + ) + lower: Optional[float] = None + upper: Optional[float] = None + + class IntervalType(StrEnum): MINUTE = "minute" HOUR = "hour" @@ -715,6 +762,22 @@ class LifecycleToggle(StrEnum): DORMANT = "dormant" +class MatchedRecordingEvent(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + uuid: str + + +class MinimalHedgehogConfig(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + accessories: list[str] + color: Optional[HedgehogColorOptions] = None + use_as_profile: bool + + class MultipleBreakdownType(StrEnum): PERSON = "person" EVENT = "event" @@ -737,6 +800,7 @@ class NodeKind(StrEnum): FUNNELS_ACTORS_QUERY = "FunnelsActorsQuery" FUNNEL_CORRELATION_ACTORS_QUERY = "FunnelCorrelationActorsQuery" SESSIONS_TIMELINE_QUERY = "SessionsTimelineQuery" + RECORDINGS_QUERY = "RecordingsQuery" SESSION_ATTRIBUTION_EXPLORER_QUERY = "SessionAttributionExplorerQuery" ERROR_TRACKING_QUERY = "ErrorTrackingQuery" DATA_TABLE_NODE = "DataTableNode" @@ -815,6 +879,19 @@ class PathsFilterLegacy(BaseModel): step_limit: Optional[int] = None +class PersonType(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + created_at: Optional[str] = None + distinct_ids: list[str] + id: Optional[str] = None + is_identified: Optional[bool] = None + name: Optional[str] = None + properties: dict[str, Any] + uuid: Optional[str] = None + + class PropertyFilterType(StrEnum): META = "meta" EVENT = "event" @@ -824,6 +901,7 @@ class PropertyFilterType(StrEnum): SESSION = "session" COHORT = "cohort" RECORDING = "recording" + LOG_ENTRY = "log_entry" GROUP = "group" HOGQL = "hogql" DATA_WAREHOUSE = "data_warehouse" @@ -1045,6 +1123,17 @@ class SessionPropertyFilter(BaseModel): value: Optional[Union[str, float, list[Union[str, float]]]] = None +class SnapshotSource(StrEnum): + WEB = "web" + MOBILE = "mobile" + UNKNOWN = "unknown" + + +class Storage(StrEnum): + OBJECT_STORAGE_LTS = "object_storage_lts" + OBJECT_STORAGE = "object_storage" + + class StepOrderValue(StrEnum): STRICT = "strict" UNORDERED = "unordered" @@ -1123,6 +1212,7 @@ class TaxonomicFilterGroupType(StrEnum): SESSION_PROPERTIES = "session_properties" HOGQL_EXPRESSION = "hogql_expression" NOTEBOOKS = "notebooks" + LOG_ENTRIES = "log_entries" REPLAY = "replay" @@ -1255,6 +1345,20 @@ class TrendsQueryResponse(BaseModel): ) +class UserBasicType(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + distinct_id: str + email: str + first_name: str + hedgehog_config: Optional[MinimalHedgehogConfig] = None + id: float + is_email_verified: Optional[Any] = None + last_name: Optional[str] = None + uuid: str + + class ActionsPie(BaseModel): model_config = ConfigDict( extra="forbid", @@ -2471,6 +2575,13 @@ class InsightActorsQueryBase(BaseModel): response: Optional[ActorsQueryResponse] = None +class InsightThreshold(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + absoluteThreshold: Optional[InsightsThresholdAbsolute] = None + + class LifecycleFilter(BaseModel): model_config = ConfigDict( extra="forbid", @@ -2510,6 +2621,25 @@ class LifecycleQueryResponse(BaseModel): ) +class LogEntryPropertyFilter(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + key: str + label: Optional[str] = None + operator: PropertyOperator + type: Literal["log_entry"] = "log_entry" + value: Optional[Union[str, float, list[Union[str, float]]]] = None + + +class MatchedRecording(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + events: list[MatchedRecordingEvent] + session_id: Optional[str] = None + + 
class MultipleBreakdownOptions(BaseModel): model_config = ConfigDict( extra="forbid", @@ -3218,6 +3348,35 @@ class SessionAttributionExplorerQuery(BaseModel): response: Optional[SessionAttributionExplorerQueryResponse] = None +class SessionRecordingType(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + active_seconds: Optional[float] = None + click_count: Optional[float] = None + console_error_count: Optional[float] = None + console_log_count: Optional[float] = None + console_warn_count: Optional[float] = None + distinct_id: Optional[str] = None + email: Optional[str] = None + end_time: str = Field(..., description="When the recording ends in ISO format.") + id: str + inactive_seconds: Optional[float] = None + keypress_count: Optional[float] = None + matching_events: Optional[list[MatchedRecording]] = Field(default=None, description="List of matching events. *") + mouse_activity_count: Optional[float] = Field( + default=None, description="count of all mouse activity in the recording, not just clicks" + ) + person: Optional[PersonType] = None + recording_duration: float = Field(..., description="Length of recording in seconds.") + snapshot_source: SnapshotSource + start_time: str = Field(..., description="When the recording starts in ISO format.") + start_url: Optional[str] = None + storage: Optional[Storage] = Field(default=None, description="Where this recording information was loaded from") + summary: Optional[str] = None + viewed: bool = Field(..., description="Whether this recording has been viewed already.") + + class SessionsTimelineQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", @@ -3301,6 +3460,31 @@ class WebTopClicksQuery(BaseModel): useSessionsTable: Optional[bool] = None +class Threshold(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + configuration: InsightThreshold + + +class AlertType(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + checks: list[AlertCheck] + condition: AlertCondition + created_at: str + created_by: UserBasicType + enabled: bool + id: str + insight: float + last_notified_at: str + name: str + state: str + subscribed_users: list[UserBasicType] + threshold: Threshold + + class AnyResponseType( RootModel[ Union[ @@ -3434,6 +3618,7 @@ class DashboardFilter(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3490,6 +3675,7 @@ class DataWarehouseNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3521,6 +3707,7 @@ class DataWarehouseNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3573,6 +3760,7 @@ class EntityNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3602,6 +3790,7 @@ class EntityNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3629,6 +3818,7 @@ class EventsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3660,6 
+3850,7 @@ class EventsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3690,6 +3881,7 @@ class EventsQuery(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3719,6 +3911,7 @@ class EventsQuery(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3747,6 +3940,7 @@ class FunnelExclusionActionsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3779,6 +3973,7 @@ class FunnelExclusionActionsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3806,6 +4001,7 @@ class FunnelExclusionEventsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3839,6 +4035,7 @@ class FunnelExclusionEventsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3866,6 +4063,7 @@ class HogQLFilters(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3922,6 +4120,7 @@ class PersonsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3949,6 +4148,7 @@ class PersonsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -3977,6 +4177,7 @@ class PropertyGroupFilterValue(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4009,6 +4210,14 @@ class QueryResponseAlternative24(BaseModel): ) +class RecordingsQueryResponse(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + has_next: bool + results: list[SessionRecordingType] + + class RetentionQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4062,6 +4271,7 @@ class ActionsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4092,6 +4302,7 @@ class ActionsNode(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4164,6 +4375,67 @@ class PropertyGroupFilter(BaseModel): values: list[PropertyGroupFilterValue] +class RecordingsQuery(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + actions: Optional[list[dict[str, Any]]] = None + console_log_filters: Optional[list[LogEntryPropertyFilter]] = None + date_from: Optional[str] = None + date_to: Optional[str] = None + events: Optional[list[dict[str, Any]]] = None 
+ filter_test_accounts: Optional[bool] = None + having_predicates: Optional[ + list[ + Union[ + EventPropertyFilter, + PersonPropertyFilter, + ElementPropertyFilter, + SessionPropertyFilter, + CohortPropertyFilter, + RecordingPropertyFilter, + LogEntryPropertyFilter, + GroupPropertyFilter, + FeaturePropertyFilter, + HogQLPropertyFilter, + EmptyPropertyFilter, + DataWarehousePropertyFilter, + DataWarehousePersonPropertyFilter, + ] + ] + ] = None + kind: Literal["RecordingsQuery"] = "RecordingsQuery" + limit: Optional[int] = None + modifiers: Optional[HogQLQueryModifiers] = Field( + default=None, description="Modifiers used when performing the query" + ) + offset: Optional[int] = None + operand: Optional[FilterLogicalOperator] = None + order: Union[DurationType, str] + person_uuid: Optional[str] = None + properties: Optional[ + list[ + Union[ + EventPropertyFilter, + PersonPropertyFilter, + ElementPropertyFilter, + SessionPropertyFilter, + CohortPropertyFilter, + RecordingPropertyFilter, + LogEntryPropertyFilter, + GroupPropertyFilter, + FeaturePropertyFilter, + HogQLPropertyFilter, + EmptyPropertyFilter, + DataWarehousePropertyFilter, + DataWarehousePersonPropertyFilter, + ] + ] + ] = None + response: Optional[RecordingsQueryResponse] = None + session_ids: Optional[list[str]] = None + + class RetentionQuery(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4187,6 +4459,7 @@ class RetentionQuery(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4230,6 +4503,7 @@ class StickinessQuery(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4280,6 +4554,7 @@ class TrendsQuery(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4349,6 +4624,7 @@ class FunnelsQuery(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4390,6 +4666,7 @@ class InsightsQueryBaseFunnelsQueryResponse(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4428,6 +4705,7 @@ class InsightsQueryBaseLifecycleQueryResponse(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4466,6 +4744,7 @@ class InsightsQueryBasePathsQueryResponse(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4504,6 +4783,7 @@ class InsightsQueryBaseRetentionQueryResponse(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4542,6 +4822,7 @@ class InsightsQueryBaseTrendsQueryResponse(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4587,6 +4868,7 @@ class LifecycleQuery(BaseModel): SessionPropertyFilter, CohortPropertyFilter, 
RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4777,6 +5059,7 @@ class PathsQuery(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, @@ -4857,6 +5140,7 @@ class FunnelCorrelationActorsQuery(BaseModel): SessionPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, + LogEntryPropertyFilter, GroupPropertyFilter, FeaturePropertyFilter, HogQLPropertyFilter, diff --git a/posthog/session_recordings/models/session_recording.py b/posthog/session_recordings/models/session_recording.py index 359df2faf94e7..be955e1981db9 100644 --- a/posthog/session_recordings/models/session_recording.py +++ b/posthog/session_recordings/models/session_recording.py @@ -27,33 +27,33 @@ class Meta: # https://github.com/PostHog/posthog-js/blob/e0dc2c005cfb5dd62b7c876676bcffe1654417a7/src/utils.ts#L457-L458 # We create recording objects with both UUIDT and a unique session_id field to remain backwards compatible. # All other models related to the session recording model uses this unique `session_id` to create the link. - session_id: models.CharField = models.CharField(unique=True, max_length=200) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, null=True, blank=True) - deleted: models.BooleanField = models.BooleanField(null=True, blank=True) - object_storage_path: models.CharField = models.CharField(max_length=200, null=True, blank=True) + session_id = models.CharField(unique=True, max_length=200) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + created_at = models.DateTimeField(auto_now_add=True, null=True, blank=True) + deleted = models.BooleanField(null=True, blank=True) + object_storage_path = models.CharField(max_length=200, null=True, blank=True) - distinct_id: models.CharField = models.CharField(max_length=400, null=True, blank=True) + distinct_id = models.CharField(max_length=400, null=True, blank=True) - duration: models.IntegerField = models.IntegerField(blank=True, null=True) - active_seconds: models.IntegerField = models.IntegerField(blank=True, null=True) - inactive_seconds: models.IntegerField = models.IntegerField(blank=True, null=True) - start_time: models.DateTimeField = models.DateTimeField(blank=True, null=True) - end_time: models.DateTimeField = models.DateTimeField(blank=True, null=True) + duration = models.IntegerField(blank=True, null=True) + active_seconds = models.IntegerField(blank=True, null=True) + inactive_seconds = models.IntegerField(blank=True, null=True) + start_time = models.DateTimeField(blank=True, null=True) + end_time = models.DateTimeField(blank=True, null=True) - click_count: models.IntegerField = models.IntegerField(blank=True, null=True) - keypress_count: models.IntegerField = models.IntegerField(blank=True, null=True) - mouse_activity_count: models.IntegerField = models.IntegerField(blank=True, null=True) + click_count = models.IntegerField(blank=True, null=True) + keypress_count = models.IntegerField(blank=True, null=True) + mouse_activity_count = models.IntegerField(blank=True, null=True) - console_log_count: models.IntegerField = models.IntegerField(blank=True, null=True) - console_warn_count: models.IntegerField = models.IntegerField(blank=True, null=True) - console_error_count: models.IntegerField = models.IntegerField(blank=True, null=True) + 
console_log_count = models.IntegerField(blank=True, null=True) + console_warn_count = models.IntegerField(blank=True, null=True) + console_error_count = models.IntegerField(blank=True, null=True) - start_url: models.CharField = models.CharField(blank=True, null=True, max_length=512) + start_url = models.CharField(blank=True, null=True, max_length=512) # we can't store storage version in the stored content # as we might need to know the version before knowing how to load the data - storage_version: models.CharField = models.CharField(blank=True, null=True, max_length=20) + storage_version = models.CharField(blank=True, null=True, max_length=20) # DYNAMIC FIELDS diff --git a/posthog/session_recordings/models/session_recording_event.py b/posthog/session_recordings/models/session_recording_event.py index 75f9a9c116251..0fc90026a4900 100644 --- a/posthog/session_recordings/models/session_recording_event.py +++ b/posthog/session_recordings/models/session_recording_event.py @@ -6,6 +6,14 @@ # DEPRECATED: PostHog model is no longer supported or used class SessionRecordingEvent(models.Model): + created_at = models.DateTimeField(auto_now_add=True, null=True, blank=True) + timestamp = models.DateTimeField(default=timezone.now, blank=True) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + distinct_id = models.CharField(max_length=200) + session_id = models.CharField(max_length=200) + window_id = models.CharField(max_length=200, null=True, blank=True) + snapshot_data = models.JSONField(default=dict) + class Meta: indexes = [ models.Index(fields=["team_id", "session_id"]), @@ -15,21 +23,13 @@ class Meta: # models.Index(fields=["team_id", "timestamp"]), ] - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, null=True, blank=True) - timestamp: models.DateTimeField = models.DateTimeField(default=timezone.now, blank=True) - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - distinct_id: models.CharField = models.CharField(max_length=200) - session_id: models.CharField = models.CharField(max_length=200) - window_id: models.CharField = models.CharField(max_length=200, null=True, blank=True) - snapshot_data: models.JSONField = models.JSONField(default=dict) - class SessionRecordingViewed(models.Model): + team = models.ForeignKey(Team, on_delete=models.CASCADE) + user = models.ForeignKey("User", on_delete=models.CASCADE) + created_at = models.DateTimeField(auto_now_add=True, null=True, blank=True) + session_id = models.CharField(max_length=200) + class Meta: unique_together = (("team_id", "user_id", "session_id"),) indexes = [models.Index(fields=["team_id", "user_id", "session_id"])] - - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - user: models.ForeignKey = models.ForeignKey("User", on_delete=models.CASCADE) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, null=True, blank=True) - session_id: models.CharField = models.CharField(max_length=200) diff --git a/posthog/session_recordings/models/session_recording_playlist.py b/posthog/session_recordings/models/session_recording_playlist.py index 9c198dbd83e3c..4a272249519a6 100644 --- a/posthog/session_recordings/models/session_recording_playlist.py +++ b/posthog/session_recordings/models/session_recording_playlist.py @@ -5,18 +5,18 @@ class SessionRecordingPlaylist(models.Model): - short_id: models.CharField = models.CharField(max_length=12, blank=True, default=generate_short_id) - name: models.CharField = models.CharField(max_length=400, 
null=True, blank=True) - derived_name: models.CharField = models.CharField(max_length=400, null=True, blank=True) - description: models.TextField = models.TextField(blank=True) - team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) - pinned: models.BooleanField = models.BooleanField(default=False) - deleted: models.BooleanField = models.BooleanField(default=False) - filters: models.JSONField = models.JSONField(default=dict) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) - created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) - last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now) - last_modified_by: models.ForeignKey = models.ForeignKey( + short_id = models.CharField(max_length=12, blank=True, default=generate_short_id) + name = models.CharField(max_length=400, null=True, blank=True) + derived_name = models.CharField(max_length=400, null=True, blank=True) + description = models.TextField(blank=True) + team = models.ForeignKey("Team", on_delete=models.CASCADE) + pinned = models.BooleanField(default=False) + deleted = models.BooleanField(default=False) + filters = models.JSONField(default=dict) + created_at = models.DateTimeField(auto_now_add=True, blank=True) + created_by = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) + last_modified_at = models.DateTimeField(default=timezone.now) + last_modified_by = models.ForeignKey( "User", on_delete=models.SET_NULL, null=True, @@ -25,7 +25,7 @@ class SessionRecordingPlaylist(models.Model): ) # DEPRECATED - is_static: models.BooleanField = models.BooleanField(default=False) + is_static = models.BooleanField(default=False) # Changing these fields materially alters the Playlist, so these count for the "last_modified_*" fields MATERIAL_PLAYLIST_FIELDS = {"name", "description", "filters"} diff --git a/posthog/session_recordings/models/session_recording_playlist_item.py b/posthog/session_recordings/models/session_recording_playlist_item.py index 73d38815fdef8..ba81e08dfd1d5 100644 --- a/posthog/session_recordings/models/session_recording_playlist_item.py +++ b/posthog/session_recordings/models/session_recording_playlist_item.py @@ -2,10 +2,7 @@ class SessionRecordingPlaylistItem(models.Model): - class Meta: - unique_together = ("recording", "playlist") - - recording: models.ForeignKey = models.ForeignKey( + recording = models.ForeignKey( "SessionRecording", related_name="playlist_items", on_delete=models.CASCADE, @@ -13,14 +10,17 @@ class Meta: null=True, blank=True, ) - playlist: models.ForeignKey = models.ForeignKey( + playlist = models.ForeignKey( "SessionRecordingPlaylist", related_name="playlist_items", on_delete=models.CASCADE, ) - created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) + created_at = models.DateTimeField(auto_now_add=True, blank=True) # DEPRECATED: We hard delete as this is only a joiner table - deleted: models.BooleanField = models.BooleanField(null=True, blank=True) + deleted = models.BooleanField(null=True, blank=True) # DEPRECATED: Use recording_id instead - session_id: models.CharField = models.CharField(max_length=200, null=True, blank=True) + session_id = models.CharField(max_length=200, null=True, blank=True) + + class Meta: + unique_together = ("recording", "playlist") diff --git a/posthog/session_recordings/queries/session_recording_list_from_filters.py 
b/posthog/session_recordings/queries/session_recording_list_from_filters.py index b7b928f10690f..92d868e83f0b2 100644 --- a/posthog/session_recordings/queries/session_recording_list_from_filters.py +++ b/posthog/session_recordings/queries/session_recording_list_from_filters.py @@ -1,9 +1,8 @@ -import re from typing import Any, NamedTuple, cast, Optional, Union from datetime import datetime, timedelta from posthog.hogql import ast -from posthog.hogql.ast import Constant, CompareOperation +from posthog.hogql.ast import CompareOperation from posthog.hogql.parser import parse_select from posthog.hogql.property import entity_to_expr, property_to_expr from posthog.hogql.query import execute_hogql_query @@ -21,10 +20,6 @@ logger = structlog.get_logger(__name__) -def is_event_property(p: Property) -> bool: - return p.type == "event" or (p.type == "hogql" and bool(re.search(r"(?<!person\.)properties\.", p.key))) - - def is_person_property(p: Property) -> bool: return p.type == "person" or (p.type == "hogql" and "person.properties" in p.key) @@ -137,8 +132,7 @@ def get_query(self): ) def _order_by_clause(self) -> ast.Field: - order = self._filter.target_entity_order or "start_time" - return ast.Field(chain=[order]) + return ast.Field(chain=[self._filter.order]) def _where_predicates(self) -> Union[ast.And, ast.Or]: exprs: list[ast.Expr] = [ @@ -213,36 +207,14 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]: ) optional_exprs.append(property_to_expr(remaining_properties, team=self._team, scope="replay")) - console_logs_predicates: list[ast.Expr] = [] - if self._filter.console_logs_filter: - console_logs_predicates.append( - ast.CompareOperation( - op=ast.CompareOperationOp.In, - left=ast.Field(chain=["level"]), - right=ast.Constant(value=self._filter.console_logs_filter), - ) - ) - - if self._filter.console_search_query: - console_logs_predicates.append( - ast.CompareOperation( - op=ast.CompareOperationOp.Gt, - left=ast.Call( - name="positionCaseInsensitive", - args=[ - ast.Field(chain=["message"]), - ast.Constant(value=self._filter.console_search_query), - ], - ), - right=ast.Constant(value=0), - ) - ) - - if console_logs_predicates: + if self._filter.console_log_filters.values: + # print(self._filter.console_log_filters.type) console_logs_subquery = ast.SelectQuery( select=[ast.Field(chain=["log_source_id"])], select_from=ast.JoinExpr(table=ast.Field(chain=["console_logs_log_entries"])), - where=self._filter.ast_operand(exprs=console_logs_predicates), + where=self._filter.ast_operand( + exprs=[property_to_expr(self._filter.console_log_filters, team=self._team)] + ), ) optional_exprs.append( @@ -258,44 +230,11 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]: return ast.And(exprs=exprs) - def _having_predicates(self) -> ast.And | Constant: - exprs: list[ast.Expr] = [] - - if self._filter.recording_duration_filter: - op = ( - ast.CompareOperationOp.GtEq - if self._filter.recording_duration_filter.operator == "gt" - or self._filter.recording_duration_filter.operator == "gte" - else ast.CompareOperationOp.LtEq - ) - exprs.append( - ast.CompareOperation( - op=op, - left=ast.Field(chain=[self._filter.duration_type_filter]), - right=ast.Constant(value=self._filter.recording_duration_filter.value), - ), - ) - - if self._filter.snapshot_source_filter: - op = ( - ast.CompareOperationOp.In - if self._filter.snapshot_source_filter.operator == "exact" - else ast.CompareOperationOp.NotIn - ) - exprs.append( - ast.CompareOperation( - op=op, - left=ast.Call(name="argMinMerge", args=[ast.Field(chain=["s", "snapshot_source"])]), -
right=ast.Constant(value=self._filter.snapshot_source_filter.value), - ), - ) - - return ast.And(exprs=exprs) if exprs else ast.Constant(value=True) + def _having_predicates(self) -> ast.Expr: + return property_to_expr(self._filter.having_predicates, team=self._team, scope="replay") def _strip_person_and_event_properties(self, property_group: PropertyGroup) -> PropertyGroup | None: - property_groups_to_keep = [ - g for g in property_group.flat if not is_event_property(g) and not is_person_property(g) - ] + property_groups_to_keep = [g for g in property_group.flat if not is_person_property(g)] return ( PropertyGroup( @@ -351,7 +290,7 @@ def person_properties(self) -> PropertyGroup | None: @cached_property def _where_predicates(self) -> ast.Expr: return ( - property_to_expr(self.person_properties, team=self._team, scope="replay_pdi") + property_to_expr(self.person_properties, team=self._team) if self.person_properties else ast.Constant(value=True) ) @@ -481,7 +420,7 @@ def _select_from_events(self, select_expr: ast.Expr) -> ast.SelectQuery: def get_query_for_session_id_matching(self) -> ast.SelectQuery | ast.SelectUnionQuery | None: use_poe = poe_is_active(self._team) and self.person_properties - if self._filter.entities or self.event_properties or use_poe: + if self._filter.entities or use_poe: return self._select_from_events(ast.Alias(alias="session_id", expr=ast.Field(chain=["$session_id"]))) else: return None @@ -551,9 +490,6 @@ def _where_predicates(self) -> ast.Expr: if event_where_exprs: exprs.append(self._filter.events_operand(exprs=event_where_exprs)) - if self.event_properties: - exprs.append(property_to_expr(self.event_properties, team=self._team, scope="replay")) - if self._team.person_on_events_mode and self.person_properties: exprs.append(property_to_expr(self.person_properties, team=self._team, scope="event")) @@ -583,10 +519,6 @@ def _having_predicates(self) -> ast.Expr: return ast.Constant(value=True) - @cached_property - def event_properties(self): - return [g for g in self._filter.property_groups.flat if is_event_property(g)] - @cached_property def person_properties(self) -> PropertyGroup | None: person_property_groups = [g for g in self._filter.property_groups.flat if is_person_property(g)] diff --git a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr index efdf25db926cb..9dd57f56763e8 100644 --- a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr +++ b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr @@ -180,7 +180,7 @@ GROUP BY events.`$session_id` HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'custom-event'])))) GROUP BY s.session_id - HAVING ifNull(greaterOrEquals(duration, 60), 0) + HAVING 1 ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -466,7 +466,7 @@ FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id - HAVING ifNull(greaterOrEquals(duration, 60), 0) + HAVING ifNull(greater(duration, 
60), 0) ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -498,7 +498,7 @@ FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id - HAVING ifNull(greaterOrEquals(active_seconds, 60), 0) + HAVING ifNull(greater(active_seconds, '60'), 0) ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -530,7 +530,7 @@ FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id - HAVING ifNull(greaterOrEquals(inactive_seconds, 60), 0) + HAVING ifNull(greater(inactive_seconds, '60'), 0) ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -978,7 +978,7 @@ FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id - HAVING ifNull(greaterOrEquals(duration, 60), 0) + HAVING ifNull(greater(duration, 60), 0) ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -1010,7 +1010,7 @@ FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id - HAVING ifNull(lessOrEquals(duration, 60), 0) + HAVING ifNull(less(duration, 60), 0) ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -1190,7 +1190,7 @@ GROUP BY events.`$session_id` HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id - HAVING ifNull(greaterOrEquals(duration, 60), 0) + HAVING ifNull(greater(duration, 60), 0) ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -1227,7 +1227,7 @@ GROUP BY events.`$session_id` HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id - HAVING ifNull(greaterOrEquals(active_seconds, 60), 0) + HAVING ifNull(greater(active_seconds, 60), 0) ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -1405,15 +1405,19 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS events__pdi___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + JOIN + (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser`, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id FROM person_distinct_id2 WHERE equals(person_distinct_id2.team_id, 2) GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__pdi ON equals(events.distinct_id, events__pdi.distinct_id) + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids LEFT JOIN (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email FROM person @@ -1422,27 +1426,8 @@ FROM person WHERE equals(person.team_id, 2) GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__pdi__person ON equals(events__pdi.events__pdi___person_id, events__pdi__person.id) - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0), ifNull(equals(events__pdi__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1)), in(s.distinct_id, - 
(SELECT person_distinct_ids.distinct_id AS distinct_id - FROM - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) - WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0))))) + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) + WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0))), ifNull(equals(s__events.`properties___$browser`, 'Chrome'), 0))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1580,15 +1565,19 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS events__pdi___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + JOIN + (SELECT nullIf(nullIf(events.`mat_$browser`, ''), 'null') AS `properties___$browser`, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 
'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id FROM person_distinct_id2 WHERE equals(person_distinct_id2.team_id, 2) GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__pdi ON equals(events.distinct_id, events__pdi.distinct_id) + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids LEFT JOIN (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email FROM person @@ -1597,27 +1586,8 @@ FROM person WHERE equals(person.team_id, 2) GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__pdi__person ON equals(events__pdi.events__pdi___person_id, events__pdi__person.id) - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0), ifNull(equals(events__pdi__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1)), in(s.distinct_id, - (SELECT person_distinct_ids.distinct_id AS distinct_id - FROM - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_distinct_ids___person_id, argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) - WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0))))) + HAVING 
and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) + WHERE ifNull(equals(person_distinct_ids__person.properties___email, 'bla'), 0))), ifNull(equals(s__events.`properties___$browser`, 'Chrome'), 0))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2271,6 +2241,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s + JOIN + (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', '') AS properties___is_internal_user, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser`, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events @@ -2289,7 +2265,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__pdi__person ON equals(events__pdi.events__pdi___person_id, events__pdi__person.id) - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), 1), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)), ifNull(notILike(events__pdi__person.properties___email, '%@posthog.com%'), 1)) + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, 
'$pageview'), 1), ifNull(notILike(events__pdi__person.properties___email, '%@posthog.com%'), 1)) GROUP BY events.`$session_id` HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id, (SELECT person_distinct_ids.distinct_id AS distinct_id @@ -2308,7 +2284,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) - WHERE ifNull(notILike(person_distinct_ids__person.properties___email, '%@posthog.com%'), 1))))) + WHERE ifNull(notILike(person_distinct_ids__person.properties___email, '%@posthog.com%'), 1))), and(ifNull(equals(s__events.properties___is_internal_user, 'false'), 0), ifNull(equals(s__events.`properties___$browser`, 'Chrome'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2377,6 +2353,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s + JOIN + (SELECT nullIf(nullIf(events.mat_is_internal_user, ''), 'null') AS properties___is_internal_user, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', '') AS `properties___$browser`, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events @@ -2395,7 +2377,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__pdi__person ON equals(events__pdi.events__pdi___person_id, events__pdi__person.id) - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), 1), and(ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)), ifNull(notILike(events__pdi__person.properties___email, '%@posthog.com%'), 1)) + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), 1), ifNull(notILike(events__pdi__person.properties___email, '%@posthog.com%'), 1)) GROUP BY events.`$session_id` HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id, (SELECT person_distinct_ids.distinct_id AS distinct_id @@ -2414,7 +2396,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids__person ON equals(person_distinct_ids.person_distinct_ids___person_id, person_distinct_ids__person.id) - WHERE ifNull(notILike(person_distinct_ids__person.properties___email, '%@posthog.com%'), 1))))) + WHERE ifNull(notILike(person_distinct_ids__person.properties___email, '%@posthog.com%'), 1))), and(ifNull(equals(s__events.properties___is_internal_user, 'false'), 0), ifNull(equals(s__events.`properties___$browser`, 'Chrome'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2526,7 +2508,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(in(console_logs_log_entries.level, ['warn', 'error']), 0), ifNull(greater(positionCaseInsensitive(console_logs_log_entries.message, 'message 4'), 0), 0))))) + WHERE or(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(equals(console_logs_log_entries.message, 'message 4'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2564,7 +2546,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(in(console_logs_log_entries.level, ['warn', 'error']), 0), ifNull(greater(positionCaseInsensitive(console_logs_log_entries.message, 'message 5'), 0), 0))))) + WHERE and(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(ilike(console_logs_log_entries.message, '%message 5%'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2602,45 +2584,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(in(console_logs_log_entries.level, ['warn', 'error']), 0), ifNull(greater(positionCaseInsensitive(console_logs_log_entries.message, 'MESSAGE 5'), 0), 0))))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - 
format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestSessionRecordingsListFromFilters.test_filter_for_recordings_by_console_text.3 - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count), - sum(s.keypress_count), - sum(s.mouse_activity_count), - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT console_logs_log_entries.log_source_id AS log_source_id - FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message - FROM log_entries - WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(in(console_logs_log_entries.level, ['info']), 0), ifNull(greater(positionCaseInsensitive(console_logs_log_entries.message, 'message 5'), 0), 0))))) + WHERE and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(ilike(console_logs_log_entries.message, '%message 5%'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2674,7 +2618,7 @@ FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id - HAVING ifNull(in(argMinMerge(s.snapshot_source), ['web']), 0) + HAVING ifNull(equals(argMinMerge(s.snapshot_source), 'web'), 0) ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -2706,7 +2650,7 @@ FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id - HAVING ifNull(in(argMinMerge(s.snapshot_source), ['mobile']), 0) + HAVING ifNull(equals(argMinMerge(s.snapshot_source), 'mobile'), 0) ORDER BY start_time DESC LIMIT 51 OFFSET 0 SETTINGS readonly=2, @@ -2742,7 +2686,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 2), 
equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE ifNull(in(console_logs_log_entries.level, ['error']), 0)))) + WHERE ifNull(equals(console_logs_log_entries.level, 'error'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2780,7 +2724,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE ifNull(in(console_logs_log_entries.level, ['info']), 0)))) + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2818,7 +2762,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE ifNull(in(console_logs_log_entries.level, ['info']), 0)))) + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2856,7 +2800,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE ifNull(in(console_logs_log_entries.level, ['warn']), 0)))) + WHERE ifNull(equals(console_logs_log_entries.level, 'warn'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2894,7 +2838,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE ifNull(in(console_logs_log_entries.level, ['warn']), 0)))) + WHERE ifNull(equals(console_logs_log_entries.level, 'warn'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2932,7 +2876,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE ifNull(in(console_logs_log_entries.level, ['info']), 0)))) + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2970,7 +2914,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE ifNull(in(console_logs_log_entries.level, ['warn', 'error']), 0)))) + WHERE or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3008,7 +2952,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE ifNull(in(console_logs_log_entries.level, ['info']), 0)))) + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3542,7 +3486,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 2), 
equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(in(console_logs_log_entries.level, ['warn']), 0), ifNull(greater(positionCaseInsensitive(console_logs_log_entries.message, 'random'), 0), 0))))) + WHERE and(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3580,7 +3524,7 @@ (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 2), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE or(ifNull(in(console_logs_log_entries.level, ['warn']), 0), ifNull(greater(positionCaseInsensitive(console_logs_log_entries.message, 'random'), 0), 0))))) + WHERE or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3951,12 +3895,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + JOIN + (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', '') AS properties___is_internal_user, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'false'), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -4025,12 +3969,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + JOIN + (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', '') AS properties___is_internal_user, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'false'), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -4099,12 +4043,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + JOIN + (SELECT nullIf(nullIf(events.mat_is_internal_user, ''), 'null') AS properties___is_internal_user, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), 
minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'false'), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -4173,12 +4117,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + JOIN + (SELECT nullIf(nullIf(events.mat_is_internal_user, ''), 'null') AS properties___is_internal_user, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'false'), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -4247,12 +4191,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 2), 
notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'true'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + JOIN + (SELECT replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', '') AS properties___is_internal_user, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'true'), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -4321,12 +4265,12 @@ sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'true'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + JOIN + (SELECT nullIf(nullIf(events.mat_is_internal_user, ''), 'null') AS properties___is_internal_user, + events.`$session_id` AS `$session_id` + FROM events PREWHERE greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC'), toIntervalDay(90))) + WHERE equals(events.team_id, 2)) AS s__events ON equals(s.session_id, s__events.`$session_id`) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 
'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(equals(s__events.properties___is_internal_user, 'true'), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py index 9cd6a5709101d..5f57396df1a7a 100644 --- a/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py +++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py @@ -234,8 +234,7 @@ def test_basic_query_active_sessions( (session_recordings, _, _) = self._filter_recordings_by( { - "duration_type_filter": "duration", - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', + "having_predicates": '[{"type":"recording","key":"duration","value":60,"operator":"gt"}]', } ) @@ -249,8 +248,7 @@ def test_basic_query_active_sessions( (session_recordings, _, _) = self._filter_recordings_by( { - "duration_type_filter": "active_seconds", - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', + "having_predicates": '[{"type":"recording","key":"active_seconds","value":"60","operator":"gt"}]', } ) @@ -260,8 +258,7 @@ def test_basic_query_active_sessions( (session_recordings, _, _) = self._filter_recordings_by( { - "duration_type_filter": "inactive_seconds", - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', + "having_predicates": '[{"type":"recording","key":"inactive_seconds","value":"60","operator":"gt"}]', } ) @@ -423,21 +420,21 @@ def test_basic_query_with_ordering(self): ) (session_recordings) = self._filter_recordings_by( - {"no_filter": None, "limit": 3, "offset": 0, "entity_order": "active_seconds"} + {"no_filter": None, "limit": 3, "offset": 0, "order": "active_seconds"} ) ordered_by_activity = [(r["session_id"], r["active_seconds"]) for r in session_recordings.results] assert ordered_by_activity == [(session_id_two, 1.0), (session_id_one, 0.002)] (session_recordings) = self._filter_recordings_by( - {"no_filter": None, "limit": 3, "offset": 0, "entity_order": "console_error_count"} + {"no_filter": None, "limit": 3, "offset": 0, "order": "console_error_count"} ) ordered_by_errors = [(r["session_id"], r["console_error_count"]) for r in session_recordings.results] assert ordered_by_errors == [(session_id_one, 1012), (session_id_two, 430)] (session_recordings) = self._filter_recordings_by( - {"no_filter": None, "limit": 3, "offset": 0, "entity_order": "start_time"} + {"no_filter": None, "limit": 3, "offset": 0, "order": "start_time"} ) ordered_by_default = [(r["session_id"], r["start_time"]) for r in session_recordings.results] @@ -854,7 +851,6 @@ def test_event_filter_with_active_sessions( (session_recordings, _, _) = self._filter_recordings_by( { - "duration_type_filter": "duration", "events": [ { "id": "$pageview", @@ -863,7 +859,7 @@ def test_event_filter_with_active_sessions( "name": "$pageview", } ], - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', + "having_predicates": '[{"type":"recording","key":"duration","value":60,"operator":"gt"}]', } ) @@ -873,7 +869,6 @@ def test_event_filter_with_active_sessions( (session_recordings, _, _) = self._filter_recordings_by( { - "duration_type_filter": "active_seconds", "events": [ { "id": "$pageview", @@ -882,7 +877,7 @@ def 
test_event_filter_with_active_sessions( "name": "$pageview", } ], - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', + "having_predicates": '[{"type":"recording","key":"active_seconds","value":60,"operator":"gt"}]', } ) @@ -1300,12 +1295,12 @@ def test_duration_filter(self): ) (session_recordings, _, _) = self._filter_recordings_by( - {"session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}'} + {"having_predicates": '[{"type":"recording","key":"duration","value":60,"operator":"gt"}]'} ) assert [r["session_id"] for r in session_recordings] == [session_id_two] (session_recordings, _, _) = self._filter_recordings_by( - {"session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"lt"}'} + {"having_predicates": '[{"type":"recording","key":"duration","value":60,"operator":"lt"}]'} ) assert [r["session_id"] for r in session_recordings] == [session_id_one] @@ -1500,13 +1495,19 @@ def test_operand_or_filters(self): ) (session_recordings, _, _) = self._filter_recordings_by( - {"console_logs": ["warn"], "console_search_query": "random"} + { + "console_log_filters": '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } ) assert len(session_recordings) == 1 assert session_recordings[0]["session_id"] == session_with_both_log_filters (session_recordings, _, _) = self._filter_recordings_by( - {"console_logs": ["warn"], "console_search_query": "random", "operand": "OR"} + { + "console_log_filters": '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "operand": "OR", + } ) assert len(session_recordings) == 2 @@ -2513,7 +2514,14 @@ def test_filter_for_recordings_with_console_logs(self): team_id=self.team.id, ) - (session_recordings, _, _) = self._filter_recordings_by({"console_logs": ["info"]}) + # (session_recordings, _, _) = self._filter_recordings_by({"console_logs": ["info"]}) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) actual = sorted( [(sr["session_id"], sr["console_log_count"]) for sr in session_recordings], @@ -2524,7 +2532,12 @@ def test_filter_for_recordings_with_console_logs(self): (with_logs_session_id, 4), ] - (session_recordings, _, _) = self._filter_recordings_by({"console_logs": ["warn"]}) + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) assert session_recordings == [] @snapshot_clickhouse_queries @@ -2557,7 +2570,12 @@ def test_filter_for_recordings_with_console_warns(self): team_id=self.team.id, ) - (session_recordings, _, _) = self._filter_recordings_by({"console_logs": ["warn"]}) + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) assert sorted( [(sr["session_id"], sr["console_warn_count"]) for sr in session_recordings], @@ -2566,7 +2584,12 @@ def test_filter_for_recordings_with_console_warns(self): (with_logs_session_id, 4), ] - (session_recordings, _, _) = 
self._filter_recordings_by({"console_logs": ["info"]}) + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) assert session_recordings == [] @@ -2600,7 +2623,12 @@ def test_filter_for_recordings_with_console_errors(self): team_id=self.team.id, ) - (session_recordings, _, _) = self._filter_recordings_by({"console_logs": ["error"]}) + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["error"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) assert sorted( [(sr["session_id"], sr["console_error_count"]) for sr in session_recordings], @@ -2609,7 +2637,12 @@ def test_filter_for_recordings_with_console_errors(self): (with_logs_session_id, 4), ] - (session_recordings, _, _) = self._filter_recordings_by({"console_logs": ["info"]}) + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) assert session_recordings == [] @@ -2690,7 +2723,12 @@ def test_filter_for_recordings_with_mixed_console_counts(self): }, ) - (session_recordings, _, _) = self._filter_recordings_by({"console_logs": ["warn", "error"]}) + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( [ @@ -2700,7 +2738,12 @@ def test_filter_for_recordings_with_mixed_console_counts(self): ] ) - (session_recordings, _, _) = self._filter_recordings_by({"console_logs": ["info"]}) + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( [ @@ -2786,8 +2829,8 @@ def test_filter_for_recordings_by_console_text(self): (session_recordings, _, _) = self._filter_recordings_by( { # there are 5 warn and 4 error logs, message 4 matches in both - "console_logs": ["warn", "error"], - "console_search_query": "message 4", + "console_log_filters": '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 4", "operator": "exact", "type": "log_entry"}]', + "operand": "OR", } ) @@ -2802,22 +2845,8 @@ def test_filter_for_recordings_by_console_text(self): (session_recordings, _, _) = self._filter_recordings_by( { # there are 5 warn and 4 error logs, message 5 matches only matches in warn - "console_logs": ["warn", "error"], - "console_search_query": "message 5", - } - ) - - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( - [ - with_warns_session_id, - ] - ) - - (session_recordings, _, _) = self._filter_recordings_by( - { - # match is case-insensitive - "console_logs": ["warn", "error"], - "console_search_query": "MESSAGE 5", + "console_log_filters": '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 5", "operator": "icontains", "type": "log_entry"}]', + "operand": "AND", } ) @@ -2830,12 +2859,12 @@ def test_filter_for_recordings_by_console_text(self): (session_recordings, _, _) = 
self._filter_recordings_by( { # message 5 does not match log level "info" - "console_logs": ["info"], - "console_search_query": "message 5", + "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 5", "operator": "icontains", "type": "log_entry"}]', + "operand": "AND", } ) - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted([]) + assert sorted([sr["session_id"] for sr in session_recordings]) == [] @snapshot_clickhouse_queries def test_filter_for_recordings_by_snapshot_source(self): @@ -2860,14 +2889,14 @@ def test_filter_for_recordings_by_snapshot_source(self): (session_recordings, _, _) = self._filter_recordings_by( { - "snapshot_source": '{"key": "snapshot_source", "value": ["web"], "operator": "exact", "type": "recording"}' + "having_predicates": '[{"key": "snapshot_source", "value": ["web"], "operator": "exact", "type": "recording"}]' } ) assert [r["session_id"] for r in session_recordings] == [session_id_one] (session_recordings, _, _) = self._filter_recordings_by( { - "snapshot_source": '{"key": "snapshot_source", "value": ["mobile"], "operator": "exact", "type": "recording"}' + "having_predicates": '[{"key": "snapshot_source", "value": ["mobile"], "operator": "exact", "type": "recording"}]' } ) assert [r["session_id"] for r in session_recordings] == [session_id_two] diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py index 96d828949b97a..f46f89537c026 100644 --- a/posthog/session_recordings/session_recording_api.py +++ b/posthog/session_recordings/session_recording_api.py @@ -17,7 +17,7 @@ from drf_spectacular.utils import extend_schema from loginas.utils import is_impersonated_session from rest_framework import exceptions, request, serializers, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.renderers import JSONRenderer from rest_framework.response import Response from rest_framework.utils.encoders import JSONEncoder @@ -44,10 +44,7 @@ from posthog.session_recordings.queries.session_recording_properties import ( SessionRecordingProperties, ) -from posthog.rate_limit import ( - ClickHouseBurstRateThrottle, - ClickHouseSustainedRateThrottle, -) +from posthog.rate_limit import ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle, PersonalApiKeyRateThrottle from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents from posthog.session_recordings.realtime_snapshots import get_realtime_snapshots, publish_subscription from ee.session_recordings.session_summary.summarize_session import summarize_recording @@ -56,7 +53,13 @@ from posthog.session_recordings.snapshots.convert_legacy_snapshots import convert_original_version_lts_recording from posthog.storage import object_storage from prometheus_client import Counter +from posthog.auth import PersonalAPIKeyAuthentication +SNAPSHOTS_BY_PERSONAL_API_KEY_COUNTER = Counter( + "snapshots_personal_api_key_counter", + "Requests for recording snapshots per personal api key", + labelnames=["api_key", "source"], +) SNAPSHOT_SOURCE_REQUESTED = Counter( "session_snapshots_requested_counter", @@ -250,9 +253,20 @@ def stream_from(url: str, headers: dict | None = None) -> Generator[requests.Res session.close() +class SnapshotsBurstRateThrottle(PersonalApiKeyRateThrottle): + scope = "snapshots_burst" + rate = "120/minute" + + +class 
SnapshotsSustainedRateThrottle(PersonalApiKeyRateThrottle): + scope = "snapshots_sustained" + rate = "600/hour" + + # NOTE: Could we put the sharing stuff in the shared mixin :thinking: class SessionRecordingViewSet(TeamAndOrgViewSetMixin, viewsets.GenericViewSet): scope_object = "session_recording" + scope_object_read_actions = ["list", "retrieve", "snapshots"] throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle] serializer_class = SessionRecordingSerializer # We don't use this @@ -279,11 +293,12 @@ def list(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: return list_recordings_response(filter, request, self.get_serializer_context()) @extend_schema( + exclude=True, description=""" Gets a list of event ids that match the given session recording filter. The filter must include a single session ID. And must include at least one event or action filter. - This API is intended for internal use and might have unannounced breaking changes.""" + This API is intended for internal use and might have unannounced breaking changes.""", ) @action(methods=["GET"], detail=False) def matching_events(self, request: request.Request, *args: Any, **kwargs: Any) -> JsonResponse: @@ -345,6 +360,7 @@ def destroy(self, request: request.Request, *args: Any, **kwargs: Any) -> Respon return Response({"success": True}, status=204) + @extend_schema(exclude=True) @action(methods=["POST"], detail=True) def persist(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: recording = self.get_object() @@ -360,7 +376,12 @@ def persist(self, request: request.Request, *args: Any, **kwargs: Any) -> Respon return Response({"success": True}) @extend_schema(exclude=True) - @action(methods=["GET"], detail=True, renderer_classes=[SurrogatePairSafeJSONRenderer]) + @action( + methods=["GET"], + detail=True, + renderer_classes=[SurrogatePairSafeJSONRenderer], + throttle_classes=[SnapshotsBurstRateThrottle, SnapshotsSustainedRateThrottle], + ) def snapshots(self, request: request.Request, **kwargs): """ Snapshots can be loaded from multiple places: @@ -399,6 +420,10 @@ def snapshots(self, request: request.Request, **kwargs): if source: SNAPSHOT_SOURCE_REQUESTED.labels(source=source).inc() + personal_api_key = PersonalAPIKeyAuthentication.find_key_with_source(request) + if personal_api_key: + SNAPSHOTS_BY_PERSONAL_API_KEY_COUNTER.labels(api_key=personal_api_key, source=source).inc() + if not source: return self._gather_session_recording_sources(recording) elif source == "realtime": @@ -497,6 +522,7 @@ def _distinct_id_from_request(request): return "anonymous" # Returns properties given a list of session recording ids + @extend_schema(exclude=True) @action(methods=["GET"], detail=False) def properties(self, request: request.Request, **kwargs): filter = SessionRecordingsFilter(request=request, team=self.team) @@ -521,6 +547,7 @@ def properties(self, request: request.Request, **kwargs): return Response({"results": session_recording_serializer.data}) + @extend_schema(exclude=True) @action(methods=["POST"], detail=True) def summarize(self, request: request.Request, **kwargs): if not request.user.is_authenticated: @@ -561,6 +588,7 @@ def summarize(self, request: request.Request, **kwargs): ) return r + @extend_schema(exclude=True) @action(methods=["GET"], detail=True) def similar_sessions(self, request: request.Request, **kwargs): if not request.user.is_authenticated: @@ -590,6 +618,7 @@ def similar_sessions(self, request: request.Request, **kwargs): r = Response(recordings, 
headers={"Cache-Control": "max-age=15"}) return r + @extend_schema(exclude=True) @action(methods=["GET"], detail=False) def error_clusters(self, request: request.Request, **kwargs): if not request.user.is_authenticated: diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index 5efc1b1e58f4f..f6868001660d3 100644 --- a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -4,6 +4,7 @@ from datetime import datetime, timedelta, UTC from unittest.mock import ANY, patch, MagicMock, call from urllib.parse import urlencode +from typing import cast from parameterized import parameterized from dateutil.parser import parse @@ -23,6 +24,7 @@ from posthog.session_recordings.queries.test.session_replay_sql import ( produce_replay_summary, ) +from posthog.models.property import Property from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -171,11 +173,17 @@ def test_can_list_recordings_even_when_the_person_has_multiple_distinct_ids(self def test_console_log_filters_are_correctly_passed_to_listing(self, mock_summary_lister): mock_summary_lister.return_value.run.return_value = ([], False) - self.client.get(f'/api/projects/{self.team.id}/session_recordings?console_logs=["warn", "error"]') + params_string = urlencode( + { + "console_log_filters": '[{"key": "console_log_level", "value": ["warn", "error"], "operator": "exact", "type": "recording"}]' + } + ) + self.client.get(f"/api/projects/{self.team.id}/session_recordings?{params_string}") assert len(mock_summary_lister.call_args_list) == 1 filter_passed_to_mock: SessionRecordingsFilter = mock_summary_lister.call_args_list[0].kwargs["filter"] - assert filter_passed_to_mock.console_logs_filter == ["warn", "error"] + console_filter = cast(Property, filter_passed_to_mock.console_log_filters.values[0]) + assert console_filter.value == ["warn", "error"] @snapshot_postgres_queries def test_listing_recordings_is_not_nplus1_for_persons(self): diff --git a/posthog/tasks/alerts/checks.py b/posthog/tasks/alerts/checks.py index e5c34f578a048..cbc4fb4604da5 100644 --- a/posthog/tasks/alerts/checks.py +++ b/posthog/tasks/alerts/checks.py @@ -1,107 +1,138 @@ +from datetime import datetime, timedelta, UTC +from typing import Optional + from celery import shared_task -from celery.canvas import group, chain +from celery.canvas import chain +from django.db import transaction from django.utils import timezone -import math import structlog +from sentry_sdk import capture_exception from posthog.api.services.query import ExecutionMode from posthog.caching.calculate_results import calculate_for_query_based_insight from posthog.email import EmailMessage +from posthog.errors import CHQueryErrorTooManySimultaneousQueries from posthog.hogql_queries.legacy_compatibility.flagged_conversion_manager import ( conversion_to_query_based, ) -from posthog.models import Alert -from posthog.schema import AnomalyCondition +from posthog.models import AlertConfiguration, Team +from posthog.models.alert import AlertCheck +from posthog.tasks.utils import CeleryQueue logger = structlog.get_logger(__name__) def check_all_alerts() -> None: - alert_ids = list(Alert.objects.all().values_list("id", flat=True)) + # TODO: Consider aligning insight calculation with cache warming of insights, see warming.py + # Currently it's implicitly aligned by alerts obviously also using cache if available + + # Use a fixed expiration time since tasks in 
the chain are executed sequentially + expire_after = datetime.now(UTC) + timedelta(minutes=30) + + teams = Team.objects.filter(alertconfiguration__isnull=False).distinct() - group_count = 10 - # All groups but the last one will have a group_size size. - # The last group will have at most group_size size. - group_size = int(math.ceil(len(alert_ids) / group_count)) + for team in teams: + alert_ids = list(AlertConfiguration.objects.filter(team=team, enabled=True).values_list("id", flat=True)) - groups = [] - for i in range(0, len(alert_ids), group_size): - alert_id_group = alert_ids[i : i + group_size] - chained_calls = chain([check_alert_task.si(alert_id) for alert_id in alert_id_group]) - groups.append(chained_calls) + # We chain the task execution to prevent queries *for a single team* running at the same time + chain(*(check_alert_task.si(str(alert_id)).set(expires=expire_after) for alert_id in alert_ids))() - group(groups).apply_async() +@transaction.atomic +def check_alert(alert_id: str) -> None: + try: + alert = AlertConfiguration.objects.get(id=alert_id, enabled=True) + except AlertConfiguration.DoesNotExist: + logger.warning("Alert not found or not enabled", alert_id=alert_id) + return -def check_alert(alert_id: int) -> None: - alert = Alert.objects.get(pk=alert_id) insight = alert.insight + error: Optional[dict] = None + + try: + with conversion_to_query_based(insight): + calculation_result = calculate_for_query_based_insight( + insight, + execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, + user=None, + ) + + if not calculation_result.result: + raise RuntimeError(f"No results for alert {alert.id}") + + result = calculation_result.result[0] + aggregated_value = result["aggregated_value"] + except Exception as e: + event_id = capture_exception(e) + error = { + "sentry_event_id": event_id, + "message": str(e), + } + aggregated_value = None + + # Lock alert to prevent concurrent state changes + alert = AlertConfiguration.objects.select_for_update().get(id=alert_id, enabled=True) + check, matches = alert.add_check(calculated_value=aggregated_value, error=error) + + if not check.state == "firing": + logger.info("Check state is %s", check.state, alert_id=alert.id) + return - with conversion_to_query_based(insight): - calculation_result = calculate_for_query_based_insight( - insight, - execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, - user=None, - ) - - if not calculation_result.result: - raise RuntimeError(f"No results for alert {alert.id}") - - anomaly_condition = AnomalyCondition.model_validate(alert.anomaly_condition) - thresholds = anomaly_condition.absoluteThreshold - - result = calculation_result.result[0] - aggregated_value = result["aggregated_value"] - anomalies_descriptions = [] - - if thresholds.lower is not None and aggregated_value < thresholds.lower: - anomalies_descriptions += [ - f"The trend value ({aggregated_value}) is below the lower threshold ({thresholds.lower})" - ] - if thresholds.upper is not None and aggregated_value > thresholds.upper: - anomalies_descriptions += [ - f"The trend value ({aggregated_value}) is above the upper threshold ({thresholds.upper})" - ] - - if not anomalies_descriptions: - logger.info("No threshold met", alert_id=alert.id) + if not matches: + # We might be firing but have no (new) matches to notify about return - send_notifications(alert, anomalies_descriptions) + send_notifications(alert, matches) -@shared_task(ignore_result=True) +@shared_task( + ignore_result=True, + expires=60 * 60, +) def 
check_all_alerts_task() -> None: check_all_alerts() -@shared_task(ignore_result=True) -def check_alert_task(alert_id: int) -> None: +@shared_task( + ignore_result=True, + queue=CeleryQueue.ANALYTICS_LIMITED.value, # Important! Prevents Clickhouse from being overwhelmed + autoretry_for=(CHQueryErrorTooManySimultaneousQueries,), + retry_backoff=1, + retry_backoff_max=10, + max_retries=10, + expires=60 * 60, +) +def check_alert_task(alert_id: str) -> None: check_alert(alert_id) -def send_notifications(alert: Alert, anomalies_descriptions: list[str]) -> None: - subject = f"PostHog alert {alert.name} has anomalies" - campaign_key = f"alert-anomaly-notification-{alert.id}-{timezone.now().timestamp()}" +@shared_task(ignore_result=True) +def checks_cleanup_task() -> None: + AlertCheck.clean_up_old_checks() + + +def send_notifications(alert: AlertConfiguration, matches: list[str]) -> None: + subject = f"PostHog alert {alert.name} is firing" + campaign_key = f"alert-firing-notification-{alert.id}-{timezone.now().timestamp()}" insight_url = f"/project/{alert.team.pk}/insights/{alert.insight.short_id}" alert_url = f"{insight_url}/alerts/{alert.id}" message = EmailMessage( campaign_key=campaign_key, subject=subject, - template_name="alert_anomaly", + template_name="alert_check_firing", template_context={ - "anomalies_descriptions": anomalies_descriptions, + "match_descriptions": matches, "insight_url": insight_url, "insight_name": alert.insight.name, "alert_url": alert_url, "alert_name": alert.name, }, ) - targets = list(filter(len, alert.target_value.split(","))) + targets = alert.subscribed_users.all().values_list("email", flat=True) if not targets: raise RuntimeError(f"no targets configured for the alert {alert.id}") for target in targets: message.add_recipient(email=target) - logger.info(f"Send notifications about {len(anomalies_descriptions)} anomalies", alert_id=alert.id) + logger.info(f"Send notifications about {len(matches)} anomalies", alert_id=alert.id) message.send() diff --git a/posthog/tasks/alerts/test/test_checks.py b/posthog/tasks/alerts/test/test_checks.py index fb5f93b3cb166..2504c90dc3a3f 100644 --- a/posthog/tasks/alerts/test/test_checks.py +++ b/posthog/tasks/alerts/test/test_checks.py @@ -1,16 +1,16 @@ -import pytest from typing import Optional from unittest.mock import MagicMock, patch from freezegun import freeze_time +from posthog.models.alert import AlertCheck from posthog.models.instance_setting import set_instance_setting from posthog.tasks.alerts.checks import send_notifications, check_alert from posthog.test.base import APIBaseTest, _create_event, flush_persons_and_events, ClickhouseDestroyTablesMixin from posthog.api.test.dashboards import DashboardAPI from posthog.schema import ChartDisplayType, EventsNode, TrendsQuery, TrendsFilter from posthog.tasks.test.utils_email_tests import mock_email_messages -from posthog.models import Alert +from posthog.models import AlertConfiguration @freeze_time("2024-06-02T08:55:00.000Z") @@ -41,20 +41,29 @@ def setUp(self) -> None: data={ "name": "alert name", "insight": self.insight["id"], - "target_value": "a@b.c,d@e.f", - "anomaly_condition": {"absoluteThreshold": {}}, + "subscribed_users": [self.user.id], + "threshold": {"configuration": {"absoluteThreshold": {}}}, }, ).json() def set_thresholds(self, lower: Optional[int] = None, upper: Optional[int] = None) -> None: self.client.patch( f"/api/projects/{self.team.id}/alerts/{self.alert['id']}", - data={"anomaly_condition": {"absoluteThreshold": {"lower": lower, "upper": upper}}}, + 
data={"threshold": {"configuration": {"absoluteThreshold": {"lower": lower, "upper": upper}}}}, ) def get_anomalies_descriptions(self, mock_send_notifications: MagicMock, call_index: int) -> list[str]: return mock_send_notifications.call_args_list[call_index].args[1] + def test_alert_is_not_triggered_when_disabled(self, mock_send_notifications: MagicMock) -> None: + self.set_thresholds(lower=1) + + self.client.patch(f"/api/projects/{self.team.id}/alerts/{self.alert['id']}", data={"enabled": False}) + + check_alert(self.alert["id"]) + + assert mock_send_notifications.call_count == 0 + def test_alert_is_triggered_for_values_above_higher_threshold(self, mock_send_notifications: MagicMock) -> None: self.set_thresholds(upper=0) @@ -70,7 +79,7 @@ def test_alert_is_triggered_for_values_above_higher_threshold(self, mock_send_no assert mock_send_notifications.call_count == 1 alert = mock_send_notifications.call_args_list[0].args[0] - assert alert.id == self.alert["id"] + assert str(alert.id) == self.alert["id"] anomalies_descriptions = self.get_anomalies_descriptions(mock_send_notifications, call_index=0) assert len(anomalies_descriptions) == 1 @@ -100,6 +109,77 @@ def test_alert_is_triggered_for_value_below_lower_threshold(self, mock_send_noti anomalies = self.get_anomalies_descriptions(mock_send_notifications, call_index=0) assert "The trend value (0) is below the lower threshold (1.0)" in anomalies + def test_alert_triggers_but_does_not_send_notification_during_firing( + self, mock_send_notifications: MagicMock + ) -> None: + self.set_thresholds(lower=1) + + check_alert(self.alert["id"]) + + assert mock_send_notifications.call_count == 1 + assert AlertCheck.objects.filter(alert_configuration=self.alert["id"]).latest("created_at").state == "firing" + + with freeze_time("2024-06-02T09:00:00.000Z"): + check_alert(self.alert["id"]) + + assert mock_send_notifications.call_count == 1 + assert ( + AlertCheck.objects.filter(alert_configuration=self.alert["id"]).latest("created_at").state == "firing" + ) + + with freeze_time("2024-06-02T09:55:00.000Z"): + self.set_thresholds(lower=0) + + check_alert(self.alert["id"]) + + assert mock_send_notifications.call_count == 1 + assert ( + AlertCheck.objects.filter(alert_configuration=self.alert["id"]).latest("created_at").state == "not_met" + ) + + with freeze_time("2024-06-02T11:00:00.000Z"): + self.set_thresholds(lower=1) + + check_alert(self.alert["id"]) + + assert mock_send_notifications.call_count == 2 + assert ( + AlertCheck.objects.filter(alert_configuration=self.alert["id"]).latest("created_at").state == "firing" + ) + + # test clean up old checks (> 14 days) + with freeze_time("2024-06-20T11:00:00.000Z"): + AlertCheck.clean_up_old_checks() + assert AlertCheck.objects.filter(alert_configuration=self.alert["id"]).count() == 0 + + def test_alert_is_set_to_inactive_when_disabled(self, mock_send_notifications: MagicMock) -> None: + self.set_thresholds(lower=1) + + check_alert(self.alert["id"]) + + assert mock_send_notifications.call_count == 1 + assert AlertCheck.objects.filter(alert_configuration=self.alert["id"]).latest("created_at").state == "firing" + + self.client.patch(f"/api/projects/{self.team.id}/alerts/{self.alert['id']}", data={"enabled": False}) + + # Check that the alert is set to inactive and checks are not triggered + check_alert(self.alert["id"]) + + assert mock_send_notifications.call_count == 1 + assert AlertConfiguration.objects.get(pk=self.alert["id"]).state == "inactive" + + def test_alert_is_set_to_inactive_when_threshold_changes(self, 
mock_send_notifications: MagicMock) -> None: + self.set_thresholds(lower=1) + + check_alert(self.alert["id"]) + + assert mock_send_notifications.call_count == 1 + assert AlertCheck.objects.filter(alert_configuration=self.alert["id"]).latest("created_at").state == "firing" + + self.set_thresholds(lower=2) + + assert AlertConfiguration.objects.get(pk=self.alert["id"]).state == "inactive" + def test_alert_is_not_triggered_for_normal_values(self, mock_send_notifications: MagicMock) -> None: self.set_thresholds(lower=0, upper=1) @@ -124,12 +204,30 @@ def test_error_while_calculating_no_alert(self, mock_send_notifications: MagicMo } )[1] - self.client.patch(f"/api/projects/{self.team.id}/alerts/{self.alert['id']}", data={"insight": insight["id"]}) + # Change with ORM to bypass API validation + AlertConfiguration.objects.filter(pk=self.alert["id"]).update(insight=insight["id"]) - with pytest.raises(KeyError): - check_alert(self.alert["id"]) + check_alert(self.alert["id"]) assert mock_send_notifications.call_count == 0 + latest_alert_check = AlertCheck.objects.filter(alert_configuration=self.alert["id"]).latest("created_at") + assert latest_alert_check.error["message"] == "'aggregated_value'" + + # mock calculate_for_query_based_insight to raise a different exception + with patch( + "posthog.tasks.alerts.checks.calculate_for_query_based_insight" + ) as mock_calculate_for_query_based_insight: + mock_calculate_for_query_based_insight.side_effect = Exception("Some error") + + with freeze_time("2024-06-02T09:00:00.000Z"): + check_alert(self.alert["id"]) + assert mock_send_notifications.call_count == 0 + + latest_alert_check = AlertCheck.objects.filter(alert_configuration=self.alert["id"]).latest( + "created_at" + ) + assert latest_alert_check.error["message"] == "Some error" + def test_alert_with_insight_with_filter(self, mock_send_notifications: MagicMock) -> None: insight = self.dashboard_api.create_insight( data={"name": "insight", "filters": {"events": [{"id": "$pageview"}], "display": "BoldNumber"}} @@ -147,13 +245,12 @@ def test_alert_with_insight_with_filter(self, mock_send_notifications: MagicMock @patch("posthog.tasks.alerts.checks.EmailMessage") def test_send_emails(self, MockEmailMessage: MagicMock, mock_send_notifications: MagicMock) -> None: mocked_email_messages = mock_email_messages(MockEmailMessage) - alert = Alert.objects.get(pk=self.alert["id"]) + alert = AlertConfiguration.objects.get(pk=self.alert["id"]) send_notifications(alert, ["first anomaly description", "second anomaly description"]) assert len(mocked_email_messages) == 1 email = mocked_email_messages[0] - assert len(email.to) == 2 - assert email.to[0]["recipient"] == "a@b.c" - assert email.to[1]["recipient"] == "d@e.f" + assert len(email.to) == 1 + assert email.to[0]["recipient"] == "user1@posthog.com" assert "first anomaly description" in email.html_body assert "second anomaly description" in email.html_body diff --git a/posthog/tasks/scheduled.py b/posthog/tasks/scheduled.py index c0ff2a468391b..df765689fb362 100644 --- a/posthog/tasks/scheduled.py +++ b/posthog/tasks/scheduled.py @@ -8,7 +8,7 @@ from posthog.caching.warming import schedule_warming_for_teams_task from posthog.celery import app -from posthog.tasks.alerts.checks import check_all_alerts_task +from posthog.tasks.alerts.checks import check_all_alerts_task, checks_cleanup_task from posthog.tasks.integrations import refresh_integrations from posthog.tasks.tasks import ( calculate_cohort, @@ -40,12 +40,10 @@ redis_celery_queue_depth, redis_heartbeat, 
schedule_all_subscriptions, - schedule_cache_updates_task, send_org_usage_reports, start_poll_query_performance, stop_surveys_reached_target, sync_all_organization_available_product_features, - sync_insight_cache_states_task, update_event_partitions, update_quota_limiting, verify_persons_data_in_sync, @@ -96,7 +94,7 @@ def setup_periodic_tasks(sender: Celery, **kwargs: Any) -> None: add_periodic_task_with_expiry(sender, 20, start_poll_query_performance.s(), "20 sec query performance heartbeat") sender.add_periodic_task( - crontab(hour="*", minute="*/30"), + crontab(hour="*", minute="0"), schedule_warming_for_teams_task.s(), name="schedule warming for largest teams", ) @@ -148,21 +146,6 @@ def setup_periodic_tasks(sender: Celery, **kwargs: Any) -> None: # Sync all Organization.available_product_features every hour, only for billing v1 orgs sender.add_periodic_task(crontab(minute="30", hour="*"), sync_all_organization_available_product_features.s()) - sync_insight_cache_states_schedule = get_crontab(settings.SYNC_INSIGHT_CACHE_STATES_SCHEDULE) - if sync_insight_cache_states_schedule: - sender.add_periodic_task( - sync_insight_cache_states_schedule, - sync_insight_cache_states_task.s(), - name="sync insight cache states", - ) - - add_periodic_task_with_expiry( - sender, - settings.UPDATE_CACHED_DASHBOARD_ITEMS_INTERVAL_SECONDS, - schedule_cache_updates_task.s(), - "check dashboard items", - ) - sender.add_periodic_task(crontab(minute="*/15"), check_async_migration_health.s()) if settings.INGESTION_LAG_METRIC_TEAM_IDS: @@ -261,9 +244,15 @@ def setup_periodic_tasks(sender: Celery, **kwargs: Any) -> None: ) sender.add_periodic_task( - crontab(hour="*", minute="20"), + crontab(hour="*", minute="45"), check_all_alerts_task.s(), - name="detect alerts' anomalies and notify about them", + name="check alerts for matches and send notifications", + ) + + sender.add_periodic_task( + crontab(hour="8", minute="0"), + checks_cleanup_task.s(), + name="clean up old alert checks", ) if settings.EE_AVAILABLE: diff --git a/posthog/tasks/sync_to_billing.py b/posthog/tasks/sync_to_billing.py index 5100139e7e582..23e5c09b87aec 100644 --- a/posthog/tasks/sync_to_billing.py +++ b/posthog/tasks/sync_to_billing.py @@ -4,7 +4,7 @@ from sentry_sdk import capture_message -@shared_task(ignore_result=True) +@shared_task(ignore_result=True, rate_limit="4/s") def sync_to_billing(organization_id: str) -> None: organization = Organization.objects.get(id=organization_id) diff --git a/posthog/templates/email/alert_anomaly.html b/posthog/templates/email/alert_anomaly.html deleted file mode 100644 index 49636488288dc..0000000000000 --- a/posthog/templates/email/alert_anomaly.html +++ /dev/null @@ -1,10 +0,0 @@ -{% extends "email/base.html" %} {% load posthog_assets %} {% block section %} -

-    The {{ alert_name }} alert detected following anomalies for {{ insight_name }}:
-    <ul>
-        {% for anomaly_description in anomalies_descriptions %}
-        <li>{{ anomaly_description }}</li>
-        {% endfor %}
-    </ul>
-
-{% endblock %}{% load posthog_filters %} diff --git a/posthog/templates/email/alert_check_firing.html b/posthog/templates/email/alert_check_firing.html new file mode 100644 index 0000000000000..52b10b504e175 --- /dev/null +++ b/posthog/templates/email/alert_check_firing.html @@ -0,0 +1,14 @@ +{% extends "email/base.html" %} {% load posthog_assets %} {% block section %} +

+    The {{ alert_name }} alert is firing for {{ insight_name }}:
+    <ul>
+        {% for item in match_descriptions %}
+        <li>{{ item }}</li>
+        {% endfor %}
+    </ul>
+ +{% endblock %}{% load posthog_filters %} diff --git a/posthog/temporal/batch_exports/batch_exports.py b/posthog/temporal/batch_exports/batch_exports.py index 70a8b323a199c..19658ec1a7b35 100644 --- a/posthog/temporal/batch_exports/batch_exports.py +++ b/posthog/temporal/batch_exports/batch_exports.py @@ -478,7 +478,7 @@ async def finish_batch_export_run(inputs: FinishBatchExportRunInputs) -> None: } batch_export_run = await database_sync_to_async(update_batch_export_run)( run_id=uuid.UUID(inputs.id), - finished_at=dt.datetime.now(), + finished_at=dt.datetime.now(dt.UTC), **update_params, ) diff --git a/posthog/temporal/batch_exports/postgres_batch_export.py b/posthog/temporal/batch_exports/postgres_batch_export.py index cee7e4615f1f3..2fcb13b35d95a 100644 --- a/posthog/temporal/batch_exports/postgres_batch_export.py +++ b/posthog/temporal/batch_exports/postgres_batch_export.py @@ -40,6 +40,7 @@ JsonType, apeek_first_and_rewind, cast_record_batch_json_columns, + make_retryable_with_exponential_backoff, set_status_to_running_task, ) from posthog.temporal.common.clickhouse import get_client @@ -110,7 +111,9 @@ def connection(self) -> psycopg.AsyncConnection: return self._connection @contextlib.asynccontextmanager - async def connect(self) -> typing.AsyncIterator[typing.Self]: + async def connect( + self, + ) -> typing.AsyncIterator[typing.Self]: """Manage a PostgreSQL connection. By using a context manager Pyscopg will take care of closing the connection. @@ -120,7 +123,12 @@ async def connect(self) -> typing.AsyncIterator[typing.Self]: # Disable certificate verification for self-signed certificates. kwargs["sslrootcert"] = None - connection = await psycopg.AsyncConnection.connect( + connect = make_retryable_with_exponential_backoff( + psycopg.AsyncConnection.connect, + retryable_exceptions=(psycopg.OperationalError,), + ) + + connection: psycopg.AsyncConnection = await connect( user=self.user, password=self.password, dbname=self.database, @@ -129,6 +137,7 @@ async def connect(self) -> typing.AsyncIterator[typing.Self]: sslmode="prefer" if settings.TEST else "require", **kwargs, ) + async with connection as connection: self._connection = connection yield self diff --git a/posthog/temporal/batch_exports/s3_batch_export.py b/posthog/temporal/batch_exports/s3_batch_export.py index f26e1418f3681..95d0d32f892fe 100644 --- a/posthog/temporal/batch_exports/s3_batch_export.py +++ b/posthog/temporal/batch_exports/s3_batch_export.py @@ -1,3 +1,4 @@ +import asyncio import contextlib import dataclasses import datetime as dt @@ -130,12 +131,12 @@ def __init__(self, part_number: int): super().__init__(f"An intermittent `RequestTimeout` was raised while attempting to upload part {part_number}") -class S3MultiPartUploadState(typing.NamedTuple): - upload_id: str - parts: list[dict[str, str | int]] +Part = dict[str, str | int] -Part = dict[str, str | int] +class S3MultiPartUploadState(typing.NamedTuple): + upload_id: str + parts: list[Part] class S3MultiPartUpload: @@ -274,7 +275,15 @@ async def abort(self): self.upload_id = None self.parts = [] - async def upload_part(self, body: BatchExportTemporaryFile, rewind: bool = True): + async def upload_part( + self, + body: BatchExportTemporaryFile, + rewind: bool = True, + max_attempts: int = 5, + initial_retry_delay: float | int = 2, + max_retry_delay: float | int = 32, + exponential_backoff_coefficient: int = 2, + ): """Upload a part of this multi-part upload.""" next_part_number = self.part_number + 1 @@ -286,26 +295,64 @@ async def upload_part(self, body: 
BatchExportTemporaryFile, rewind: bool = True) # So we tell mypy to be nice with us. reader = io.BufferedReader(body) # type: ignore + try: + etag = await self.upload_part_retryable( + reader, + next_part_number, + max_attempts=max_attempts, + initial_retry_delay=initial_retry_delay, + max_retry_delay=max_retry_delay, + exponential_backoff_coefficient=exponential_backoff_coefficient, + ) + except Exception: + raise + + finally: + reader.detach() # BufferedReader closes the file otherwise. + + self.parts.append({"PartNumber": next_part_number, "ETag": etag}) + + async def upload_part_retryable( + self, + reader: io.BufferedReader, + next_part_number: int, + max_attempts: int = 5, + initial_retry_delay: float | int = 2, + max_retry_delay: float | int = 32, + exponential_backoff_coefficient: int = 2, + ) -> str: + """Attempt to upload a part for this multi-part upload retrying on transient errors.""" + response: dict[str, str] | None = None + attempt = 0 + async with self.s3_client() as s3_client: - try: - response = await s3_client.upload_part( - Bucket=self.bucket_name, - Key=self.key, - PartNumber=next_part_number, - UploadId=self.upload_id, - Body=reader, - ) - except botocore.exceptions.ClientError as err: - error_code = err.response.get("Error", {}).get("Code", None) + while response is None: + try: + response = await s3_client.upload_part( + Bucket=self.bucket_name, + Key=self.key, + PartNumber=next_part_number, + UploadId=self.upload_id, + Body=reader, + ) + + except botocore.exceptions.ClientError as err: + error_code = err.response.get("Error", {}).get("Code", None) + attempt += 1 + + if error_code is not None and error_code == "RequestTimeout": + if attempt >= max_attempts: + raise IntermittentUploadPartTimeoutError(part_number=next_part_number) from err - if error_code is not None and error_code == "RequestTimeout": - raise IntermittentUploadPartTimeoutError(part_number=next_part_number) from err - else: - raise + await asyncio.sleep( + min(max_retry_delay, initial_retry_delay * (attempt**exponential_backoff_coefficient)) + ) - reader.detach() # BufferedReader closes the file otherwise. + continue + else: + raise - self.parts.append({"PartNumber": next_part_number, "ETag": response["ETag"]}) + return response["ETag"] async def __aenter__(self): """Asynchronous context manager protocol enter.""" @@ -395,7 +442,7 @@ async def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tupl # This is the error we expect when no details as the sequence will be empty. interval_start = inputs.data_interval_start logger.debug( - "Did not receive details from previous activity Excecution. Export will start from the beginning %s", + "Did not receive details from previous activity Execution. Export will start from the beginning %s", interval_start, ) except Exception: @@ -403,7 +450,7 @@ async def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tupl # Ideally, any new exceptions should be added to the previous block after the first time and we will never land here. interval_start = inputs.data_interval_start logger.warning( - "Did not receive details from previous activity Excecution due to an unexpected error. Export will start from the beginning %s", + "Did not receive details from previous activity Execution due to an unexpected error. 
Export will start from the beginning %s", interval_start, ) else: @@ -526,6 +573,7 @@ async def flush_to_s3( ) await s3_upload.upload_part(local_results_file) + rows_exported.add(records_since_last_flush) bytes_exported.add(bytes_since_last_flush) @@ -585,7 +633,7 @@ def get_batch_export_writer( ) elif inputs.file_format == "JSONLines": writer = JSONLBatchExportWriter( - max_bytes=settings.BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES, + max_bytes=max_bytes, flush_callable=flush_callable, compression=inputs.compression, ) @@ -677,6 +725,8 @@ async def run(self, inputs: S3BatchExportInputs): "ClientError", # An S3 bucket doesn't exist. "NoSuchBucket", + # Couldn't connect to custom S3 endpoint + "EndpointConnectionError", ], finish_inputs=finish_inputs, ) diff --git a/posthog/temporal/batch_exports/snowflake_batch_export.py b/posthog/temporal/batch_exports/snowflake_batch_export.py index b0ba71bd85961..ad0afb833e176 100644 --- a/posthog/temporal/batch_exports/snowflake_batch_export.py +++ b/posthog/temporal/batch_exports/snowflake_batch_export.py @@ -83,6 +83,12 @@ class SnowflakeConnectionError(Exception): pass +class SnowflakeRetryableConnectionError(Exception): + """Raised when a connection to Snowflake is not established.""" + + pass + + @dataclasses.dataclass class SnowflakeHeartbeatDetails(BatchExportHeartbeatDetails): """The Snowflake batch export details included in every heartbeat. @@ -192,7 +198,13 @@ async def connect(self): ) except OperationalError as err: - raise SnowflakeConnectionError("Could not connect to Snowflake") from err + if err.errno == 251012: + # 251012: Generic retryable error code + raise SnowflakeRetryableConnectionError( + "Could not connect to Snowflake but this error may be retried" + ) from err + else: + raise SnowflakeConnectionError(f"Could not connect to Snowflake - {err.errno}: {err.msg}") from err self._connection = connection diff --git a/posthog/temporal/batch_exports/temporary_file.py b/posthog/temporal/batch_exports/temporary_file.py index 39651f3560d72..75cde8a9b651b 100644 --- a/posthog/temporal/batch_exports/temporary_file.py +++ b/posthog/temporal/batch_exports/temporary_file.py @@ -85,7 +85,8 @@ def __enter__(self): def __exit__(self, exc, value, tb): """Context-manager protocol exit method.""" - return self._file.__exit__(exc, value, tb) + self._file.__exit__(exc, value, tb) + return False def __iter__(self): yield from self._file @@ -334,9 +335,11 @@ async def open_temporary_file(self, current_flush_counter: int = 0): try: yield - except Exception as e: - self.error = e + + except Exception as temp_err: + self.error = temp_err raise + finally: self.track_bytes_written(temp_file) @@ -347,7 +350,7 @@ async def open_temporary_file(self, current_flush_counter: int = 0): # `write_record_batch`. For example, footer bytes. await self.flush(self.last_inserted_at, is_last=True) - self._batch_export_file = None + self._batch_export_file = None @property def batch_export_file(self): @@ -445,21 +448,24 @@ def __init__( self.default = default - def write(self, content: bytes) -> int: + def write_dict(self, d: dict[str, typing.Any]) -> int: """Write a single row of JSONL.""" try: - n = self.batch_export_file.write(orjson.dumps(content, default=str) + b"\n") + n = self.batch_export_file.write(orjson.dumps(d, default=str) + b"\n") except orjson.JSONEncodeError: # orjson is very strict about invalid unicode. This slow path protects us against # things we've observed in practice, like single surrogate codes, e.g. 
"\ud83d" - cleaned_content = replace_broken_unicode(content) + cleaned_content = replace_broken_unicode(d) n = self.batch_export_file.write(orjson.dumps(cleaned_content, default=str) + b"\n") return n def _write_record_batch(self, record_batch: pa.RecordBatch) -> None: """Write records to a temporary file as JSONL.""" - for record in record_batch.to_pylist(): - self.write(record) + for record_dict in record_batch.to_pylist(): + if not record_dict: + continue + + self.write_dict(record_dict) class CSVBatchExportWriter(BatchExportWriter): diff --git a/posthog/temporal/batch_exports/utils.py b/posthog/temporal/batch_exports/utils.py index 6a68b9f035835..e0d8cddc19439 100644 --- a/posthog/temporal/batch_exports/utils.py +++ b/posthog/temporal/batch_exports/utils.py @@ -1,17 +1,19 @@ import asyncio import collections.abc import contextlib -import json +import functools import typing import uuid import orjson import pyarrow as pa +import structlog from posthog.batch_exports.models import BatchExportRun from posthog.batch_exports.service import aupdate_batch_export_run T = typing.TypeVar("T") +logger = structlog.get_logger() def peek_first_and_rewind( @@ -120,12 +122,41 @@ class JsonScalar(pa.ExtensionScalar): """Represents a JSON binary string.""" def as_py(self) -> dict | None: + """Try to convert value to Python representation. + + We attempt to decode the value returned by `as_py` as JSON 3 times: + 1. As returned by `as_py`, without changes. + 2. By replacing any encoding errors. + 3. By treating the value as a string and surrouding it with quotes. + + If all else fails, we will log the offending value and re-raise the decoding error. + """ if self.value: + value = self.value.as_py() + + if not value: + return None + + try: + return orjson.loads(value.encode("utf-8")) + except orjson.JSONDecodeError: + pass + + try: + return orjson.loads(value.encode("utf-8", "replace")) + except orjson.JSONDecodeError: + pass + + if isinstance(value, str) and len(value) > 0: + # Handles `"$set": "Something"` + value = f'"{value}"' + try: - return orjson.loads(self.value.as_py().encode("utf-8")) - except: - # Fallback if it's something orjson can't handle - return json.loads(self.value.as_py()) + return orjson.loads(value.encode("utf-8", "replace")) + except orjson.JSONDecodeError: + logger.exception("Failed to decode: %s", value) + raise + else: return None @@ -171,3 +202,45 @@ def cast_record_batch_json_columns( record_batch.select(remaining_column_names).columns + casted_arrays, names=remaining_column_names + list(intersection), ) + + +_Result = typing.TypeVar("_Result") +FutureLike = ( + asyncio.Future[_Result] | collections.abc.Coroutine[None, typing.Any, _Result] | collections.abc.Awaitable[_Result] +) + + +def make_retryable_with_exponential_backoff( + func: typing.Callable[..., collections.abc.Awaitable[_Result]], + timeout: float | int | None = None, + max_attempts: int = 5, + initial_retry_delay: float | int = 2, + max_retry_delay: float | int = 32, + exponential_backoff_coefficient: int = 2, + retryable_exceptions: tuple[type[Exception], ...] 
= (Exception,), + is_exception_retryable: typing.Callable[[Exception], bool] = lambda _: True, +) -> typing.Callable[..., collections.abc.Awaitable[_Result]]: + """Retry the provided async `func` until `max_attempts` is reached.""" + functools.wraps(func) + + async def inner(*args, **kwargs): + attempt = 0 + + while True: + try: + result = await asyncio.wait_for(func(*args, **kwargs), timeout=timeout) + + except retryable_exceptions as err: + attempt += 1 + + if is_exception_retryable(err) is False or attempt >= max_attempts: + raise + + await asyncio.sleep( + min(max_retry_delay, initial_retry_delay * (attempt**exponential_backoff_coefficient)) + ) + + else: + return result + + return inner diff --git a/posthog/temporal/data_imports/pipelines/pipeline.py b/posthog/temporal/data_imports/pipelines/pipeline.py index a1d03fddf2183..b0715f952f854 100644 --- a/posthog/temporal/data_imports/pipelines/pipeline.py +++ b/posthog/temporal/data_imports/pipelines/pipeline.py @@ -48,11 +48,6 @@ def __init__( ): self.inputs = inputs self.logger = logger - if incremental: - # Incremental syncs: Assuming each page is 100 items for now so bound each run at 50_000 items - self.source = source.add_limit(500) - else: - self.source = source self._incremental = incremental self.refresh_dlt = reset_pipeline @@ -62,6 +57,12 @@ def __init__( and inputs.job_type != ExternalDataSource.Type.SNOWFLAKE ) + if self.should_chunk_pipeline: + # Incremental syncs: Assuming each page is 100 items for now so bound each run at 50_000 items + self.source = source.add_limit(500) + else: + self.source = source + def _get_pipeline_name(self): return f"{self.inputs.job_type}_pipeline_{self.inputs.team_id}_run_{self.inputs.schema_id}" diff --git a/posthog/temporal/tests/batch_exports/test_batch_export_utils.py b/posthog/temporal/tests/batch_exports/test_batch_export_utils.py index 5421e288784ab..968eab5c0d723 100644 --- a/posthog/temporal/tests/batch_exports/test_batch_export_utils.py +++ b/posthog/temporal/tests/batch_exports/test_batch_export_utils.py @@ -5,7 +5,7 @@ import pytest_asyncio from posthog.batch_exports.models import BatchExportRun -from posthog.temporal.batch_exports.utils import set_status_to_running_task +from posthog.temporal.batch_exports.utils import make_retryable_with_exponential_backoff, set_status_to_running_task from posthog.temporal.common.logger import bind_temporal_worker_logger from posthog.temporal.tests.utils.models import ( acreate_batch_export, @@ -73,3 +73,92 @@ async def test_batch_export_run_is_set_to_running(ateam, s3_batch_export): await run.arefresh_from_db() assert run.status == BatchExportRun.Status.RUNNING + + +async def test_make_retryable_with_exponential_backoff_called_max_attempts(): + """Test function wrapped is called all `max_attempts` times.""" + counter = 0 + + async def raise_value_error(): + nonlocal counter + counter += 1 + + raise ValueError("I failed") + + with pytest.raises(ValueError): + await make_retryable_with_exponential_backoff(raise_value_error, max_retry_delay=1)() + + assert counter == 5 + + +async def test_make_retryable_with_exponential_backoff_called_max_attempts_if_timesout(): + """Test function wrapped is called all `max_attempts` times on a timeout.""" + counter = 0 + + async def raise_value_error(): + nonlocal counter + counter += 1 + await asyncio.sleep(10) + + with pytest.raises(TimeoutError): + await make_retryable_with_exponential_backoff(raise_value_error, max_retry_delay=1, timeout=1)() + + assert counter == 5 + + +async def 
test_make_retryable_with_exponential_backoff_called_max_attempts_if_func_returns_retryable(): + """Test function wrapped is called all `max_attempts` times if `is_exception_retryable` returns `True`.""" + counter = 0 + + def is_exception_retryable(err): + return True + + async def raise_value_error(): + nonlocal counter + counter += 1 + + raise ValueError("I failed") + + with pytest.raises(ValueError): + await make_retryable_with_exponential_backoff( + raise_value_error, is_exception_retryable=is_exception_retryable, max_retry_delay=1 + )() + + assert counter == 5 + + +async def test_make_retryable_with_exponential_backoff_raises_if_func_returns_not_retryable(): + """Test function wrapped raises immediately if `is_exception_retryable` returns `False`.""" + counter = 0 + + def is_exception_retryable(err): + return False + + async def raise_value_error(): + nonlocal counter + counter += 1 + + raise ValueError("I failed") + + with pytest.raises(ValueError): + await make_retryable_with_exponential_backoff( + raise_value_error, is_exception_retryable=is_exception_retryable + )() + + assert counter == 1 + + +async def test_make_retryable_with_exponential_backoff_raises_if_not_retryable(): + """Test function wrapped raises immediately if exception not in `retryable_exceptions`.""" + counter = 0 + + async def raise_value_error(): + nonlocal counter + counter += 1 + + raise ValueError("I failed") + + with pytest.raises(ValueError): + await make_retryable_with_exponential_backoff(raise_value_error, retryable_exceptions=(TypeError,))() + + assert counter == 1 diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py index 648ae2aa765ce..79fb6986ed16c 100644 --- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py @@ -1407,7 +1407,7 @@ async def test_s3_export_workflow_with_request_timeouts( elif model is not None: batch_export_schema = model - raised = False + raised = 0 class FakeSession(aioboto3.Session): @contextlib.asynccontextmanager @@ -1420,8 +1420,8 @@ async def client(self, *args, **kwargs): async def faulty_upload_part(*args, **kwargs): nonlocal raised - if not raised: - raised = True + if raised < 5: + raised = raised + 1 raise botocore.exceptions.ClientError( error_response={ "Error": {"Code": "RequestTimeout", "Message": "Oh no!"}, @@ -1436,6 +1436,11 @@ async def faulty_upload_part(*args, **kwargs): yield client + class DoNotRetryPolicy(RetryPolicy): + def __init__(self, *args, **kwargs): + kwargs["maximum_attempts"] = 1 + super().__init__(*args, **kwargs) + workflow_id = str(uuid.uuid4()) inputs = S3BatchExportInputs( team_id=ateam.pk, @@ -1447,8 +1452,9 @@ async def faulty_upload_part(*args, **kwargs): **s3_batch_export.destination.config, ) - async with await WorkflowEnvironment.start_time_skipping() as activity_environment: - async with Worker( + async with ( + await WorkflowEnvironment.start_time_skipping() as activity_environment, + Worker( activity_environment.client, task_queue=settings.TEMPORAL_TASK_QUEUE, workflows=[S3BatchExportWorkflow], @@ -1458,16 +1464,20 @@ async def faulty_upload_part(*args, **kwargs): finish_batch_export_run, ], workflow_runner=UnsandboxedWorkflowRunner(), + ), + ): + with ( + mock.patch("posthog.temporal.batch_exports.s3_batch_export.aioboto3.Session", FakeSession), + mock.patch("posthog.temporal.batch_exports.batch_exports.RetryPolicy", DoNotRetryPolicy), ): - 
with mock.patch("posthog.temporal.batch_exports.s3_batch_export.aioboto3.Session", FakeSession): - await activity_environment.client.execute_workflow( - S3BatchExportWorkflow.run, - inputs, - id=workflow_id, - task_queue=settings.TEMPORAL_TASK_QUEUE, - retry_policy=RetryPolicy(maximum_attempts=2), - execution_timeout=dt.timedelta(seconds=10), - ) + await activity_environment.client.execute_workflow( + S3BatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=2), + execution_timeout=dt.timedelta(minutes=2), + ) runs = await afetch_batch_export_runs(batch_export_id=s3_batch_export.id) assert len(runs) == 2 @@ -1478,6 +1488,10 @@ async def faulty_upload_part(*args, **kwargs): (events_to_export_created, persons_to_export_created) = generate_test_data assert run.status == "FailedRetryable" assert run.records_completed is None + assert ( + run.latest_error + == "IntermittentUploadPartTimeoutError: An intermittent `RequestTimeout` was raised while attempting to upload part 1" + ) run = runs[1] (events_to_export_created, persons_to_export_created) = generate_test_data diff --git a/posthog/test/test_migration_0459.py b/posthog/test/test_migration_0459.py new file mode 100644 index 0000000000000..444f548d46bff --- /dev/null +++ b/posthog/test/test_migration_0459.py @@ -0,0 +1,232 @@ +from typing import Any + +from posthog.test.base import NonAtomicTestMigrations + + +class ConvertPersonsNodeInsightsToActorsQueryMigrationTest(NonAtomicTestMigrations): + migrate_from = "0458_alter_insightviewed_team_alter_insightviewed_user" + migrate_to = "0459_convert_personsnode_insights_to_actorsquery" + + CLASS_DATA_LEVEL_SETUP = False + + def setUpBeforeMigration(self, apps: Any) -> None: + Organization = apps.get_model("posthog", "Organization") + Project = apps.get_model("posthog", "Project") + Team = apps.get_model("posthog", "Team") + Insight = apps.get_model("posthog", "Insight") + + self.organization = Organization.objects.create(name="o1") + self.project = Project.objects.create(organization=self.organization, name="p1", id=1000001) + self.team = Team.objects.create(organization=self.organization, name="t1", project=self.project) + + self.insight_1 = Insight.objects.create( + team=self.team, + query={"full": True, "kind": "DataTableNode", "source": {"kind": "PersonsNode", "cohort": "4669"}}, + ) + self.insight_2 = Insight.objects.create( + team=self.team, + deleted=True, + query={ + "full": True, + "kind": "DataTableNode", + "source": {"kind": "PersonsNode", "search": "@"}, + "propertiesViaUrl": True, + }, + ) + self.insight_3 = Insight.objects.create( + team=self.team, + query={ + "full": True, + "kind": "DataTableNode", + "source": { + "kind": "PersonsNode", + "properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}], + }, + "propertiesViaUrl": True, + }, + ) + self.insight_4 = Insight.objects.create( + team=self.team, + query={ + "full": True, + "kind": "DataTableNode", + "source": { + "kind": "PersonsNode", + "cohort": "3", + "properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}], + }, + "propertiesViaUrl": True, + }, + ) + self.insight_5 = Insight.objects.create( + team=self.team, + query={"full": True, "kind": "DataTableNode", "source": {"kind": "PersonsNode"}, "propertiesViaUrl": True}, + ) + self.insight_6 = Insight.objects.create( + team=self.team, + query={ + "full": True, + "kind": "DataTableNode", + "source": { + "kind": "PersonsNode", + 
"fixedProperties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}], + "properties": [ + {"key": "name", "type": "person", "value": "is_set", "operator": "is_set"}, + {"key": "surname", "type": "person", "value": "is_set", "operator": "is_set"}, + {"key": "id", "type": "cohort", "value": 3}, + ], + "limit": 100, + "offset": 100, + }, + "propertiesViaUrl": True, + }, + ) + self.insight_7 = Insight.objects.create( + team=self.team, + query={ + "kind": "DataTableNode", + "source": { + "kind": "ActorsQuery", + "cohort": 3, + }, + }, + ) + self.insight_8 = Insight.objects.create( + team=self.team, + query={ + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "cohort": 3, + "series": [{"kind": "EventsNode", "event": "$pageview"}], + }, + }, + ) + + def test_migration(self) -> None: + # Ensure self.apps is not None + assert self.apps is not None + + self.insight_1.refresh_from_db() + self.assertEqual( + self.insight_1.query, + { + "full": True, + "kind": "DataTableNode", + "source": { + "kind": "ActorsQuery", + "properties": [{"key": "id", "type": "cohort", "value": 4669}], + }, + }, + ) + + self.insight_2.refresh_from_db() + self.assertEqual( + self.insight_2.query, + { + "full": True, + "kind": "DataTableNode", + "source": {"kind": "ActorsQuery", "search": "@", "properties": []}, + "propertiesViaUrl": True, + }, + ) + + self.insight_3.refresh_from_db() + self.assertEqual( + self.insight_3.query, + { + "full": True, + "kind": "DataTableNode", + "source": { + "kind": "ActorsQuery", + "properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}], + }, + "propertiesViaUrl": True, + }, + ) + + self.insight_4.refresh_from_db() + self.assertEqual( + self.insight_4.query, + { + "full": True, + "kind": "DataTableNode", + "source": { + "kind": "ActorsQuery", + "properties": [ + {"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}, + {"key": "id", "type": "cohort", "value": 3}, + ], + }, + "propertiesViaUrl": True, + }, + ) + + self.insight_5.refresh_from_db() + self.assertEqual( + self.insight_5.query, + { + "full": True, + "kind": "DataTableNode", + "source": {"kind": "ActorsQuery", "properties": []}, + "propertiesViaUrl": True, + }, + ) + + self.insight_6.refresh_from_db() + self.assertEqual( + self.insight_6.query, + { + "full": True, + "kind": "DataTableNode", + "source": { + "kind": "ActorsQuery", + "fixedProperties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}], + "properties": [ + {"key": "name", "type": "person", "value": "is_set", "operator": "is_set"}, + {"key": "surname", "type": "person", "value": "is_set", "operator": "is_set"}, + {"key": "id", "type": "cohort", "value": 3}, + ], + "limit": 100, + "offset": 100, + }, + "propertiesViaUrl": True, + }, + ) + + self.insight_7.refresh_from_db() + self.assertEqual( + self.insight_7.query, + { + "kind": "DataTableNode", + "source": { + "kind": "ActorsQuery", + "cohort": 3, + }, + }, + ) + + self.insight_8.refresh_from_db() + self.assertEqual( + self.insight_8.query, + { + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [{"kind": "EventsNode", "event": "$pageview"}], + "cohort": 3, + }, + }, + ) + + def tearDown(self) -> None: + # Ensure self.apps is not None + assert self.apps is not None + + Insight = self.apps.get_model("posthog", "Insight") + Insight.objects.all().delete() + self.team.delete() + self.project.delete() + self.organization.delete() + + super().tearDown() diff --git 
a/posthog/types.py b/posthog/types.py index c5b42dd8de896..ea5e3080eae4f 100644 --- a/posthog/types.py +++ b/posthog/types.py @@ -25,6 +25,7 @@ PersonPropertyFilter, RecordingPropertyFilter, SessionPropertyFilter, + LogEntryPropertyFilter, TrendsQuery, FunnelsQuery, RetentionQuery, @@ -52,6 +53,7 @@ PersonPropertyFilter, ElementPropertyFilter, SessionPropertyFilter, + LogEntryPropertyFilter, CohortPropertyFilter, RecordingPropertyFilter, GroupPropertyFilter, diff --git a/posthog/users_scripts/aggregate_funnel.py b/posthog/users_scripts/aggregate_funnel.py index 130aa9df5fa67..20099a65b1ec5 100755 --- a/posthog/users_scripts/aggregate_funnel.py +++ b/posthog/users_scripts/aggregate_funnel.py @@ -1,25 +1,22 @@ #!/usr/bin/python3 -import ast +import json import sys from dataclasses import dataclass, replace from itertools import groupby, permutations from typing import Any, cast -from collections.abc import Callable from collections.abc import Sequence -N_ARGS = 6 - def parse_args(line): - arg_functions: list[Callable] = [int, int, str, str, ast.literal_eval, ast.literal_eval] - args = [] - start = 0 - for i in range(N_ARGS - 1): - end = line.find("\t", start) - args.append(arg_functions[i](line[start:end])) - start = end + 1 - args.append(arg_functions[-1](line[start:])) - return args + args = json.loads(line) + return [ + int(args["num_steps"]), + int(args["conversion_window_limit"]), + str(args["breakdown_attribution_type"]), + str(args["funnel_order_type"]), + args["prop_vals"], # Array(Array(String)) + args["value"], # Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) + ] @dataclass(frozen=True) @@ -28,21 +25,6 @@ class EnteredTimestamp: timings: Any -def breakdown_to_single_quoted_string(breakdown): - if isinstance(breakdown, str): - return "'" + breakdown.replace("'", r"\'") + "'" - if isinstance(breakdown, int): - return breakdown - if isinstance(breakdown, list): - if not breakdown: - return "[]" - if isinstance(breakdown[0], str): - return "['" + "','".join([x.replace("'", r"\'") for x in breakdown]) + "']" - if isinstance(breakdown[0], int): - return str(breakdown) - raise Exception() - - # each one can be multiple steps here # it only matters when they entered the funnel - you can propagate the time from the previous step when you update # This function is defined for Clickhouse in test_function.xml along with types @@ -83,7 +65,7 @@ def process_event(timestamp, breakdown, steps, *, entered_timestamp, prop_val) - if in_match_window and not already_reached_this_step_with_same_entered_timestamp: if exclusion: - results.append(f"(-1, {breakdown_to_single_quoted_string(prop_val)}, [])") + results.append((-1, prop_val, [])) return False is_unmatched_step_attribution = ( breakdown_step is not None and step == breakdown_step - 1 and prop_val != breakdown @@ -112,9 +94,7 @@ def loop_prop_val(prop_val): def add_max_step(): i = cast(int, max_step[0]) final = cast(EnteredTimestamp, max_step[1]) - results.append( - f"({i - 1}, {breakdown_to_single_quoted_string(prop_val)}, {str([final.timings[i] - final.timings[i - 1] for i in range(1, i)])})" - ) + results.append((i - 1, prop_val, [final.timings[i] - final.timings[i - 1] for i in range(1, i)])) filtered_events = ( ((timestamp, breakdown, steps) for (timestamp, breakdown, steps) in events if breakdown == prop_val) @@ -155,7 +135,7 @@ def add_max_step(): return [loop_prop_val(prop_val) for prop_val in prop_vals] - print(f"[{','.join(results)}]") # noqa: T201 + print(json.dumps({"result": results}), end="\n") # 
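Editor's note on the aggregate_funnel.py change above: the UDF script stops parsing tab-separated `ast.literal_eval` arguments and instead reads one JSON object per stdin line, replying with one JSON object per line under a `result` key. The snippet below only illustrates that envelope; the event tuples and the reply payload are made-up placeholders, since real lines are produced and consumed by the ClickHouse UDF configuration.

import json

# One request line in the shape aggregate_funnel.py now expects (field names as in parse_args above).
request_line = json.dumps(
    {
        "num_steps": 2,
        "conversion_window_limit": 3600,
        "breakdown_attribution_type": "all_events",
        "funnel_order_type": "ordered",
        "prop_vals": [[""]],  # Array(Array(String))
        "value": [            # placeholder events; the real tuple layout comes from ClickHouse
            [1700000000.0, [""], [1]],
            [1700000100.0, [""], [2]],
        ],
    }
)
args = json.loads(request_line)

# One reply line in the shape the script prints: {"result": [(step_reached, prop_val, timings), ...]}
reply_line = json.dumps({"result": [[1, args["prop_vals"][0], [100.0]]]})
print(reply_line)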
noqa: T201 if __name__ == "__main__": diff --git a/posthog/users_scripts/aggregate_funnel_array.py b/posthog/users_scripts/aggregate_funnel_array.py deleted file mode 100755 index 9fa533cfa1c39..0000000000000 --- a/posthog/users_scripts/aggregate_funnel_array.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/python3 - -from aggregate_funnel import parse_user_aggregation_with_conversion_window_and_breakdown, parse_args -import sys - -if __name__ == "__main__": - for line in sys.stdin: - parse_user_aggregation_with_conversion_window_and_breakdown(*parse_args(line)) - sys.stdout.flush() diff --git a/posthog/users_scripts/aggregate_funnel_array_trends_test.py b/posthog/users_scripts/aggregate_funnel_array_trends_test.py new file mode 100755 index 0000000000000..25930be27a2cf --- /dev/null +++ b/posthog/users_scripts/aggregate_funnel_array_trends_test.py @@ -0,0 +1,13 @@ +#!/usr/bin/python3 + +from aggregate_funnel_trends import parse_user_aggregation_with_conversion_window_and_breakdown, parse_args +import sys +import json + +if __name__ == "__main__": + for line in sys.stdin: + try: + parse_user_aggregation_with_conversion_window_and_breakdown(*parse_args(line)) + except Exception as e: + print(json.dumps({"result": json.dumps(str(e))}), end="\n") # noqa: T201 + sys.stdout.flush() diff --git a/posthog/users_scripts/aggregate_funnel_cohort.py b/posthog/users_scripts/aggregate_funnel_cohort.py deleted file mode 100755 index 9fa533cfa1c39..0000000000000 --- a/posthog/users_scripts/aggregate_funnel_cohort.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/python3 - -from aggregate_funnel import parse_user_aggregation_with_conversion_window_and_breakdown, parse_args -import sys - -if __name__ == "__main__": - for line in sys.stdin: - parse_user_aggregation_with_conversion_window_and_breakdown(*parse_args(line)) - sys.stdout.flush() diff --git a/posthog/users_scripts/test_function.py b/posthog/users_scripts/aggregate_funnel_test.py similarity index 78% rename from posthog/users_scripts/test_function.py rename to posthog/users_scripts/aggregate_funnel_test.py index 1b7b0256d3171..513e07d386d63 100755 --- a/posthog/users_scripts/test_function.py +++ b/posthog/users_scripts/aggregate_funnel_test.py @@ -1,4 +1,5 @@ #!/usr/bin/python3 +import json from aggregate_funnel import parse_user_aggregation_with_conversion_window_and_breakdown, parse_args import sys @@ -8,5 +9,5 @@ try: parse_user_aggregation_with_conversion_window_and_breakdown(*parse_args(line)) except Exception as e: - print(e, line) # noqa: T201 + print(json.dumps({"result": json.dumps(str(e))}), end="\n") # noqa: T201 sys.stdout.flush() diff --git a/posthog/users_scripts/aggregate_funnel_trends.py b/posthog/users_scripts/aggregate_funnel_trends.py new file mode 100755 index 0000000000000..3a61961526a40 --- /dev/null +++ b/posthog/users_scripts/aggregate_funnel_trends.py @@ -0,0 +1,131 @@ +#!/usr/bin/python3 +import sys +from dataclasses import dataclass, replace +from typing import Any +from collections.abc import Sequence +import json + + +def parse_args(line): + args = json.loads(line) + return [ + int(args["from_step"]), + int(args["num_steps"]), + int(args["conversion_window_limit"]), + str(args["breakdown_attribution_type"]), + str(args["funnel_order_type"]), + args["prop_vals"], # Array(Array(String)) + args["value"], # Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) + ] + + +@dataclass(frozen=True) +class EnteredTimestamp: + timestamp: Any + timings: Any + + +# each one can be multiple steps here +# it only 
matters when they entered the funnel - you can propagate the time from the previous step when you update +# This function is defined for Clickhouse in test_function.xml along with types +# num_steps is the total number of steps in the funnel +# conversion_window_limit is in seconds +# events is a array of tuples of (timestamp, breakdown, [steps]) +# steps is an array of integers which represent the steps that this event qualifies for. it looks like [1,3,5,6]. +# negative integers represent an exclusion on that step. each event is either all exclusions or all steps. +def parse_user_aggregation_with_conversion_window_and_breakdown( + from_step: int, + num_steps: int, + conversion_window_limit_seconds: int, + breakdown_attribution_type: str, + funnel_order_type: str, + prop_vals: list[Any], + events: Sequence[tuple[float, int, list[str] | int | str, list[int]]], +): + default_entered_timestamp = EnteredTimestamp(0, []) + # If the attribution mode is a breakdown step, set this to the integer that represents that step + breakdown_step = int(breakdown_attribution_type[5:]) if breakdown_attribution_type.startswith("step_") else None + + # Results is a map of start intervals to success or failure. If an interval isn't here, it means the + # user didn't enter + results = {} + + # We call this for each possible breakdown value. + def loop_prop_val(prop_val): + # we need to track every distinct entry into the funnel through to the end + filtered_events = ( + ( + (timestamp, interval_start, breakdown, steps) + for (timestamp, interval_start, breakdown, steps) in events + if breakdown == prop_val + ) + if breakdown_attribution_type == "all_events" + else events + ) + list_of_entered_timestamps = [] + + for timestamp, interval_start, breakdown, steps in filtered_events: + for step in reversed(steps): + exclusion = False + if step < 0: + exclusion = True + step = -step + # Special code to handle the first step + # Potential Optimization: we could skip tracking here if the user has already completed the funnel for this interval + if step == 1: + entered_timestamp = [default_entered_timestamp] * (num_steps + 1) + # Set the interval start at 0, which is what we want to return if this works. + # For strict funnels, we need to track if the "from_step" has been hit + # Abuse the timings field on the 0th index entered_timestamp to have the elt True if we have + entered_timestamp[0] = EnteredTimestamp(interval_start, [True] if from_step == 0 else []) + entered_timestamp[1] = EnteredTimestamp(timestamp, [timestamp]) + list_of_entered_timestamps.append(entered_timestamp) + else: + for entered_timestamp in list_of_entered_timestamps[:]: + in_match_window = ( + timestamp - entered_timestamp[step - 1].timestamp <= conversion_window_limit_seconds + ) + already_reached_this_step_with_same_entered_timestamp = ( + entered_timestamp[step].timestamp == entered_timestamp[step - 1].timestamp + ) + if in_match_window and not already_reached_this_step_with_same_entered_timestamp: + if exclusion: + # this is a complete failure, exclude this person, don't print anything, don't count + return False + is_unmatched_step_attribution = ( + breakdown_step is not None and step == breakdown_step - 1 and prop_val != breakdown + ) + if not is_unmatched_step_attribution: + entered_timestamp[step] = replace( + entered_timestamp[step - 1], + timings=[*entered_timestamp[step - 1].timings, timestamp], + ) + # check if we have hit the goal. 
if we have, remove it from the list and add it to the successful_timestamps + if entered_timestamp[num_steps].timestamp > 0: + results[entered_timestamp[0].timestamp] = (1, prop_val) + list_of_entered_timestamps.remove(entered_timestamp) + # If we have hit the from_step threshold, record it (abuse the timings field) + elif step == from_step + 1: + entered_timestamp[0].timings.append(True) + + # At the end of the event, clear all steps that weren't done by that event + if funnel_order_type == "strict": + for entered_timestamp in list_of_entered_timestamps[:]: + for i in range(1, len(entered_timestamp)): + if i not in steps: + entered_timestamp[i] = default_entered_timestamp + + # At this point, everything left in entered_timestamps is a failure, if it has made it to from_step + for entered_timestamp in list_of_entered_timestamps: + if entered_timestamp[0].timestamp not in results and len(entered_timestamp[0].timings) > 0: + results[entered_timestamp[0].timestamp] = (-1, prop_val) + + [loop_prop_val(prop_val) for prop_val in prop_vals] + result = [(interval_start, success_bool, prop_val) for interval_start, (success_bool, prop_val) in results.items()] + print(json.dumps({"result": result}), end="\n") # noqa: T201 + + +if __name__ == "__main__": + for line in sys.stdin: + parse_user_aggregation_with_conversion_window_and_breakdown(*parse_args(line)) + sys.stdout.flush() diff --git a/posthog/utils.py b/posthog/utils.py index 5249e7bf647df..aaf02658b42d1 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -20,6 +20,7 @@ from typing import TYPE_CHECKING, Any, Optional, Union, cast from urllib.parse import unquote, urljoin, urlparse from zoneinfo import ZoneInfo +from rest_framework import serializers import lzstring import posthoganalytics @@ -69,7 +70,6 @@ DEFAULT_DATE_FROM_DAYS = 7 - logger = structlog.get_logger(__name__) # https://stackoverflow.com/questions/4060221/how-to-reliably-open-a-file-in-the-same-directory-as-a-python-script @@ -1043,6 +1043,18 @@ def cache_requested_by_client(request: Request) -> bool | str: return _request_has_key_set("use_cache", request) +def filters_override_requested_by_client(request: Request) -> Optional[dict]: + raw_filters = request.query_params.get("filters_override") + + if raw_filters is not None: + try: + return json.loads(raw_filters) + except Exception: + raise serializers.ValidationError({"filters_override": "Invalid JSON passed in filters_override parameter"}) + + return None + + def _request_has_key_set(key: str, request: Request, allowed_values: Optional[list[str]] = None) -> bool | str: query_param = request.query_params.get(key) data_value = request.data.get(key) diff --git a/posthog/warehouse/api/external_data_schema.py b/posthog/warehouse/api/external_data_schema.py index 1242e45139944..5b982e54b8434 100644 --- a/posthog/warehouse/api/external_data_schema.py +++ b/posthog/warehouse/api/external_data_schema.py @@ -6,7 +6,7 @@ from typing import Optional, Any from posthog.api.routing import TeamAndOrgViewSetMixin from rest_framework import viewsets, filters, status -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework.request import Request from rest_framework.response import Response @@ -21,7 +21,6 @@ trigger_external_data_workflow, unpause_external_data_schedule, cancel_external_data_workflow, - delete_data_import_folder, ) from posthog.warehouse.models.external_data_schema import ( filter_mysql_incremental_fields, @@ -251,17 +250,9 @@ 
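Editor's note on `filters_override_requested_by_client` above: it accepts a missing parameter, parses valid JSON, and converts anything unparseable into a DRF validation error. The standalone re-implementation below mirrors that behaviour without pulling in Django/DRF; the local `ValidationError` stands in for `rest_framework.serializers.ValidationError`.

import json
from typing import Optional


class ValidationError(Exception):
    """Stand-in for rest_framework.serializers.ValidationError in this sketch."""


def parse_filters_override(raw_filters: Optional[str]) -> Optional[dict]:
    if raw_filters is None:
        return None  # parameter not supplied at all
    try:
        return json.loads(raw_filters)
    except Exception:
        raise ValidationError({"filters_override": "Invalid JSON passed in filters_override parameter"})


# e.g. GET /api/...?filters_override={"date_from":"-7d"}
assert parse_filters_override('{"date_from": "-7d"}') == {"date_from": "-7d"}
assert parse_filters_override(None) is None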
def resync(self, request: Request, *args: Any, **kwargs: Any): if latest_running_job and latest_running_job.workflow_id and latest_running_job.status == "Running": cancel_external_data_workflow(latest_running_job.workflow_id) - all_jobs = ExternalDataJob.objects.filter( - schema_id=instance.pk, team_id=instance.team_id, status="Completed" - ).all() - - # Unnecessary to iterate for incremental jobs since they'll all by identified by the schema_id. Be over eager just to clear remnants - for job in all_jobs: - try: - delete_data_import_folder(job.folder_path()) - except Exception as e: - logger.exception(f"Could not clean up data import folder: {job.folder_path()}", exc_info=e) - pass + source: ExternalDataSource = instance.source + source.job_inputs.update({"reset_pipeline": True}) + source.save() try: trigger_external_data_workflow(instance) diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py index 07b7cb116bfa9..3fa01db7c1a9b 100644 --- a/posthog/warehouse/api/external_data_source.py +++ b/posthog/warehouse/api/external_data_source.py @@ -6,7 +6,7 @@ from sentry_sdk import capture_exception import structlog from rest_framework import filters, serializers, status, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from rest_framework.request import Request from rest_framework.response import Response diff --git a/posthog/warehouse/api/table.py b/posthog/warehouse/api/table.py index 77b579bad122a..7fffad6ae5714 100644 --- a/posthog/warehouse/api/table.py +++ b/posthog/warehouse/api/table.py @@ -1,7 +1,7 @@ from typing import Any from rest_framework import exceptions, filters, request, response, serializers, status, viewsets -from rest_framework.decorators import action +from posthog.api.utils import action from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import UserBasicSerializer diff --git a/posthog/warehouse/models/credential.py b/posthog/warehouse/models/credential.py index 0be74816459d8..39828f51898d8 100644 --- a/posthog/warehouse/models/credential.py +++ b/posthog/warehouse/models/credential.py @@ -9,7 +9,7 @@ class DataWarehouseCredential(CreatedMetaFields, UUIDModel): access_key: EncryptedTextField = EncryptedTextField(max_length=500) access_secret: EncryptedTextField = EncryptedTextField(max_length=500) - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) + team = models.ForeignKey(Team, on_delete=models.CASCADE) __repr__ = sane_repr("access_key") diff --git a/posthog/warehouse/models/datawarehouse_saved_query.py b/posthog/warehouse/models/datawarehouse_saved_query.py index 204a85c15f80f..e3cccac60488b 100644 --- a/posthog/warehouse/models/datawarehouse_saved_query.py +++ b/posthog/warehouse/models/datawarehouse_saved_query.py @@ -37,18 +37,16 @@ def validate_saved_query_name(value): class DataWarehouseSavedQuery(CreatedMetaFields, UUIDModel, DeletedMetaFields): - name: models.CharField = models.CharField(max_length=128, validators=[validate_saved_query_name]) - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - columns: models.JSONField = models.JSONField( + name = models.CharField(max_length=128, validators=[validate_saved_query_name]) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + columns = models.JSONField( default=dict, null=True, blank=True, help_text="Dict of all columns with ClickHouse type (including Nullable())", ) - external_tables: models.JSONField = models.JSONField( - 
default=list, null=True, blank=True, help_text="List of all external tables" - ) - query: models.JSONField = models.JSONField(default=dict, null=True, blank=True, help_text="HogQL query") + external_tables = models.JSONField(default=list, null=True, blank=True, help_text="List of all external tables") + query = models.JSONField(default=dict, null=True, blank=True, help_text="HogQL query") class Meta: constraints = [ diff --git a/posthog/warehouse/models/external_data_job.py b/posthog/warehouse/models/external_data_job.py index 7b7f1cc15e1a8..3b85c70029405 100644 --- a/posthog/warehouse/models/external_data_job.py +++ b/posthog/warehouse/models/external_data_job.py @@ -16,21 +16,15 @@ class Status(models.TextChoices): COMPLETED = "Completed", "Completed" CANCELLED = "Cancelled", "Cancelled" - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - pipeline: models.ForeignKey = models.ForeignKey( - "posthog.ExternalDataSource", related_name="jobs", on_delete=models.CASCADE - ) - schema: models.ForeignKey = models.ForeignKey( - "posthog.ExternalDataSchema", on_delete=models.CASCADE, null=True, blank=True - ) - status: models.CharField = models.CharField(max_length=400) - rows_synced: models.BigIntegerField = models.BigIntegerField(null=True, blank=True) - latest_error: models.TextField = models.TextField( - null=True, help_text="The latest error that occurred during this run." - ) - - workflow_id: models.CharField = models.CharField(max_length=400, null=True, blank=True) - workflow_run_id: models.CharField = models.CharField(max_length=400, null=True, blank=True) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + pipeline = models.ForeignKey("posthog.ExternalDataSource", related_name="jobs", on_delete=models.CASCADE) + schema = models.ForeignKey("posthog.ExternalDataSchema", on_delete=models.CASCADE, null=True, blank=True) + status = models.CharField(max_length=400) + rows_synced = models.BigIntegerField(null=True, blank=True) + latest_error = models.TextField(null=True, help_text="The latest error that occurred during this run.") + + workflow_id = models.CharField(max_length=400, null=True, blank=True) + workflow_run_id = models.CharField(max_length=400, null=True, blank=True) __repr__ = sane_repr("id") diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py index 8306f3468fb8b..f42cf3248b8ad 100644 --- a/posthog/warehouse/models/external_data_schema.py +++ b/posthog/warehouse/models/external_data_schema.py @@ -38,22 +38,16 @@ class SyncFrequency(models.TextChoices): WEEKLY = "week", "Weekly" MONTHLY = "month", "Monthly" - name: models.CharField = models.CharField(max_length=400) - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - source: models.ForeignKey = models.ForeignKey( - "posthog.ExternalDataSource", related_name="schemas", on_delete=models.CASCADE - ) - table: models.ForeignKey = models.ForeignKey( - "posthog.DataWarehouseTable", on_delete=models.SET_NULL, null=True, blank=True - ) - should_sync: models.BooleanField = models.BooleanField(default=True) - latest_error: models.TextField = models.TextField( - null=True, help_text="The latest error that occurred when syncing this schema." 
- ) - status: models.CharField = models.CharField(max_length=400, null=True, blank=True) - last_synced_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) - sync_type: models.CharField = models.CharField(max_length=128, choices=SyncType.choices, null=True, blank=True) - sync_type_config: models.JSONField = models.JSONField( + name = models.CharField(max_length=400) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + source = models.ForeignKey("posthog.ExternalDataSource", related_name="schemas", on_delete=models.CASCADE) + table = models.ForeignKey("posthog.DataWarehouseTable", on_delete=models.SET_NULL, null=True, blank=True) + should_sync = models.BooleanField(default=True) + latest_error = models.TextField(null=True, help_text="The latest error that occurred when syncing this schema.") + status = models.CharField(max_length=400, null=True, blank=True) + last_synced_at = models.DateTimeField(null=True, blank=True) + sync_type = models.CharField(max_length=128, choices=SyncType.choices, null=True, blank=True) + sync_type_config = models.JSONField( default=dict, blank=True, ) @@ -61,9 +55,7 @@ class SyncFrequency(models.TextChoices): sync_frequency = deprecate_field( models.CharField(max_length=128, choices=SyncFrequency.choices, default=SyncFrequency.DAILY, blank=True) ) - sync_frequency_interval: models.DurationField = models.DurationField( - default=timedelta(hours=6), null=True, blank=True - ) + sync_frequency_interval = models.DurationField(default=timedelta(hours=6), null=True, blank=True) __repr__ = sane_repr("name") diff --git a/posthog/warehouse/models/external_data_source.py b/posthog/warehouse/models/external_data_source.py index 9ddd407f03fef..49c91d7781764 100644 --- a/posthog/warehouse/models/external_data_source.py +++ b/posthog/warehouse/models/external_data_source.py @@ -37,24 +37,24 @@ class SyncFrequency(models.TextChoices): MONTHLY = "month", "Monthly" # TODO provide flexible schedule definition - source_id: models.CharField = models.CharField(max_length=400) - connection_id: models.CharField = models.CharField(max_length=400) - destination_id: models.CharField = models.CharField(max_length=400, null=True, blank=True) - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) + source_id = models.CharField(max_length=400) + connection_id = models.CharField(max_length=400) + destination_id = models.CharField(max_length=400, null=True, blank=True) + team = models.ForeignKey(Team, on_delete=models.CASCADE) # Deprecated, use `ExternalDataSchema.sync_frequency_interval` - sync_frequency: models.CharField = models.CharField( + sync_frequency = models.CharField( max_length=128, choices=SyncFrequency.choices, default=SyncFrequency.DAILY, blank=True ) # `status` is deprecated in favour of external_data_schema.status - status: models.CharField = models.CharField(max_length=400) - source_type: models.CharField = models.CharField(max_length=128, choices=Type.choices) + status = models.CharField(max_length=400) + source_type = models.CharField(max_length=128, choices=Type.choices) job_inputs: encrypted_fields.fields.EncryptedJSONField = encrypted_fields.fields.EncryptedJSONField( null=True, blank=True ) - are_tables_created: models.BooleanField = models.BooleanField(default=False) - prefix: models.CharField = models.CharField(max_length=100, null=True, blank=True) + are_tables_created = models.BooleanField(default=False) + prefix = models.CharField(max_length=100, null=True, blank=True) __repr__ = sane_repr("id") diff --git 
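Editor's note on the warehouse model changes above: they all follow one mechanical pattern — the redundant class-level annotations (`team: models.ForeignKey = models.ForeignKey(...)`) are dropped and the field type is left to be inferred from the assignment, presumably to line up with the django-stubs 5.0.4 / mypy 1.11 bump elsewhere in this PR. A minimal, self-contained illustration of the resulting style (hypothetical model and app label, inline settings purely so the snippet imports on its own):

import django
from django.conf import settings

if not settings.configured:
    settings.configure(INSTALLED_APPS=[], USE_TZ=True)
    django.setup()

from django.db import models


class ExampleWarehouseTable(models.Model):
    # Before: name: models.CharField = models.CharField(max_length=128)
    # After:  let django-stubs infer the descriptor type from the right-hand side.
    name = models.CharField(max_length=128)
    row_count = models.IntegerField(null=True, help_text="How many rows are currently synced")

    class Meta:
        app_label = "warehouse_example"  # hypothetical label so the model registers outside an app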
a/posthog/warehouse/models/external_table_definitions.py b/posthog/warehouse/models/external_table_definitions.py index 62d0289f74f59..0d8e49fa4dca7 100644 --- a/posthog/warehouse/models/external_table_definitions.py +++ b/posthog/warehouse/models/external_table_definitions.py @@ -657,5 +657,5 @@ def get_dlt_mapping_for_external_table(table): "nullable": True, } for _, field in external_tables[table].items() - if type(field) != ast.ExpressionField + if type(field) is not ast.ExpressionField } diff --git a/posthog/warehouse/models/join.py b/posthog/warehouse/models/join.py index b6438cc294fb4..000b1ba34f9b2 100644 --- a/posthog/warehouse/models/join.py +++ b/posthog/warehouse/models/join.py @@ -26,20 +26,20 @@ def __init__(self, *args, **kwargs): warn("DataWarehouseViewLink is deprecated, use DataWarehouseJoin", DeprecationWarning, stacklevel=2) super().__init__(*args, **kwargs) - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - table: models.CharField = models.CharField(max_length=128) - from_join_key: models.CharField = models.CharField(max_length=400) - saved_query: models.ForeignKey = models.ForeignKey(DataWarehouseSavedQuery, on_delete=models.CASCADE) - to_join_key: models.CharField = models.CharField(max_length=400) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + table = models.CharField(max_length=128) + from_join_key = models.CharField(max_length=400) + saved_query = models.ForeignKey(DataWarehouseSavedQuery, on_delete=models.CASCADE) + to_join_key = models.CharField(max_length=400) class DataWarehouseJoin(CreatedMetaFields, UUIDModel, DeletedMetaFields): - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - source_table_name: models.CharField = models.CharField(max_length=400) - source_table_key: models.CharField = models.CharField(max_length=400) - joining_table_name: models.CharField = models.CharField(max_length=400) - joining_table_key: models.CharField = models.CharField(max_length=400) - field_name: models.CharField = models.CharField(max_length=400) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + source_table_name = models.CharField(max_length=400) + source_table_key = models.CharField(max_length=400) + joining_table_name = models.CharField(max_length=400) + joining_table_key = models.CharField(max_length=400) + field_name = models.CharField(max_length=400) def join_function( self, override_source_table_key: Optional[str] = None, override_joining_table_key: Optional[str] = None diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py index a742abc79aaa3..f2ebec5239fd6 100644 --- a/posthog/warehouse/models/table.py +++ b/posthog/warehouse/models/table.py @@ -81,29 +81,23 @@ class TableFormat(models.TextChoices): Delta = "Delta", "Delta" DeltaS3Wrapper = "DeltaS3Wrapper", "DeltaS3Wrapper" - name: models.CharField = models.CharField(max_length=128) - format: models.CharField = models.CharField(max_length=128, choices=TableFormat.choices) - team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) + name = models.CharField(max_length=128) + format = models.CharField(max_length=128, choices=TableFormat.choices) + team = models.ForeignKey(Team, on_delete=models.CASCADE) - url_pattern: models.CharField = models.CharField(max_length=500) - credential: models.ForeignKey = models.ForeignKey( - DataWarehouseCredential, on_delete=models.CASCADE, null=True, blank=True - ) + url_pattern = models.CharField(max_length=500) + credential = 
models.ForeignKey(DataWarehouseCredential, on_delete=models.CASCADE, null=True, blank=True) - external_data_source: models.ForeignKey = models.ForeignKey( - "ExternalDataSource", on_delete=models.CASCADE, null=True, blank=True - ) + external_data_source = models.ForeignKey("ExternalDataSource", on_delete=models.CASCADE, null=True, blank=True) - columns: models.JSONField = models.JSONField( + columns = models.JSONField( default=dict, null=True, blank=True, help_text="Dict of all columns with Clickhouse type (including Nullable())", ) - row_count: models.IntegerField = models.IntegerField( - null=True, help_text="How many rows are currently synced in this table" - ) + row_count = models.IntegerField(null=True, help_text="How many rows are currently synced in this table") __repr__ = sane_repr("name") diff --git a/production.Dockerfile b/production.Dockerfile index b64293dcb69a8..07906afd9bc4c 100644 --- a/production.Dockerfile +++ b/production.Dockerfile @@ -38,11 +38,12 @@ COPY ./bin/ ./bin/ COPY babel.config.js tsconfig.json webpack.config.js tailwind.config.js ./ RUN pnpm build - # # --------------------------------------------------------- # -FROM node:18.19.1-bullseye-slim AS plugin-server-build +FROM ghcr.io/posthog/rust-node-container:bullseye_rust_1.80.1-node_18.19.1 AS plugin-server-build +WORKDIR /code +COPY ./rust ./rust WORKDIR /code/plugin-server SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] @@ -182,6 +183,7 @@ COPY --from=plugin-server-build --chown=posthog:posthog /code/plugin-server/dist COPY --from=plugin-server-build --chown=posthog:posthog /code/plugin-server/node_modules /code/plugin-server/node_modules COPY --from=plugin-server-build --chown=posthog:posthog /code/plugin-server/package.json /code/plugin-server/package.json + # Copy the Python dependencies and Django staticfiles from the posthog-build stage. 
COPY --from=posthog-build --chown=posthog:posthog /code/staticfiles /code/staticfiles COPY --from=posthog-build --chown=posthog:posthog /python-runtime /python-runtime diff --git a/requirements-dev.in b/requirements-dev.in index 5ca5431dbaf1c..8ab3ba93b3a2d 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -11,13 +11,13 @@ -c requirements.txt -ruff~=0.4.10 -mypy~=1.10.0 +ruff~=0.6.1 +mypy~=1.11.1 mypy-baseline~=0.7.0 mypy-extensions==1.0.0 datamodel-code-generator==0.25.6 djangorestframework-stubs~=3.14.5 -django-stubs==4.2.7 +django-stubs==5.0.4 Faker==17.5.0 fakeredis[lua]==2.23.3 freezegun==1.2.2 diff --git a/requirements-dev.txt b/requirements-dev.txt index 938eaead5395c..277f8c9ceb44f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -75,11 +75,9 @@ django==4.2.14 # -c requirements.txt # django-stubs # django-stubs-ext -django-stubs==4.2.7 - # via - # -r requirements-dev.in - # djangorestframework-stubs -django-stubs-ext==5.0.0 +django-stubs==5.0.4 + # via djangorestframework-stubs +django-stubs-ext==5.0.4 # via django-stubs djangorestframework-stubs==3.14.5 # via -r requirements-dev.in @@ -155,8 +153,7 @@ multidict==6.0.2 # -c requirements.txt # aiohttp # yarl -mypy==1.10.0 - # via -r requirements-dev.in +mypy==1.11.1 mypy-baseline==0.7.0 # via -r requirements-dev.in mypy-boto3-s3==1.34.65 @@ -283,8 +280,7 @@ ruamel-yaml==0.18.6 # via prance ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.4.10 - # via -r requirements-dev.in +ruff==0.6.1 six==1.16.0 # via # -c requirements.txt @@ -343,7 +339,7 @@ types-toml==0.10.8.20240310 # via inline-snapshot types-tzlocal==5.1.0.1 # via -r requirements-dev.in -typing-extensions==4.7.1 +typing-extensions==4.12.2 # via # -c requirements.txt # boto3-stubs diff --git a/requirements.in b/requirements.in index e45f1c658ccf2..ac315f2c5e719 100644 --- a/requirements.in +++ b/requirements.in @@ -38,7 +38,7 @@ dlt[deltalake]==0.5.3 dnspython==2.2.1 drf-exceptions-hog==0.4.0 drf-extensions==0.7.0 -drf-spectacular==0.27.1 +drf-spectacular==0.27.2 geoip2==4.6.0 google-cloud-bigquery==3.11.4 gunicorn==20.1.0 @@ -97,7 +97,7 @@ phonenumberslite==8.13.6 openai==1.10.0 tiktoken==0.6.0 nh3==0.2.14 -hogql-parser==1.0.36 +hogql-parser==1.0.38 zxcvbn==4.4.28 zstd==1.5.5.1 xmlsec==1.3.13 # Do not change this version - it will break SAML diff --git a/requirements.txt b/requirements.txt index 151cbcfb2007f..8e64d768b0ec7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -275,7 +275,7 @@ h11==0.13.0 # wsproto hexbytes==1.0.0 # via dlt -hogql-parser==1.0.36 +hogql-parser==1.0.38 # via -r requirements.in httpcore==1.0.2 # via httpx @@ -660,7 +660,7 @@ types-protobuf==4.22.0.0 # via temporalio types-setuptools==69.0.0.0 # via requirements-parser -typing-extensions==4.7.1 +typing-extensions==4.12.2 # via # dlt # openai diff --git a/rust/.cargo/config.toml b/rust/.cargo/config.toml new file mode 100644 index 0000000000000..2b5cb3c5910a0 --- /dev/null +++ b/rust/.cargo/config.toml @@ -0,0 +1,4 @@ +[env] +# Force SQLX to run in offline mode for CI. 
Devs can change this if they want, to live code against the DB, +# but we use it at the workspace level here to allow use of sqlx macros across all crates +SQLX_OFFLINE = "true" diff --git a/rust/.env b/rust/.env index 43eda2a13040b..d37feead94dcb 100644 --- a/rust/.env +++ b/rust/.env @@ -1 +1 @@ -DATABASE_URL=postgres://posthog:posthog@localhost:15432/test_database +DATABASE_URL=postgres://posthog:posthog@localhost:15432/test_database \ No newline at end of file diff --git a/rust/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json b/rust/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json new file mode 100644 index 0000000000000..a2cb4e3a0a883 --- /dev/null +++ b/rust/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET last_heartbeat = NOW() WHERE id = $1 AND lock_id = $2", + "describe": { + "columns": [], + "parameters": { + "Left": ["Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503" +} diff --git a/rust/.sqlx/query-16d533b5a15b0b9926a181f578b5b577efe424710b45f02e1ddeece8bca96f87.json b/rust/.sqlx/query-16d533b5a15b0b9926a181f578b5b577efe424710b45f02e1ddeece8bca96f87.json new file mode 100644 index 0000000000000..7a3a8b98d9da5 --- /dev/null +++ b/rust/.sqlx/query-16d533b5a15b0b9926a181f578b5b577efe424710b45f02e1ddeece8bca96f87.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET vm_state = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Text", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "16d533b5a15b0b9926a181f578b5b577efe424710b45f02e1ddeece8bca96f87" +} diff --git a/rust/.sqlx/query-213e9d70e145a01fb42d5c3a80f9126073113a4af03c4c9fd3a81004d898f883.json b/rust/.sqlx/query-213e9d70e145a01fb42d5c3a80f9126073113a4af03c4c9fd3a81004d898f883.json new file mode 100644 index 0000000000000..f9150cfcda3e1 --- /dev/null +++ b/rust/.sqlx/query-213e9d70e145a01fb42d5c3a80f9126073113a4af03c4c9fd3a81004d898f883.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT COUNT(*) FROM cyclotron_jobs WHERE state = 'available' AND scheduled <= NOW()", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [null] + }, + "hash": "213e9d70e145a01fb42d5c3a80f9126073113a4af03c4c9fd3a81004d898f883" +} diff --git a/rust/.sqlx/query-2b62adf40f8dd5758690c763df30fffa01b679951ec786f8ee2410454b9a2de0.json b/rust/.sqlx/query-2b62adf40f8dd5758690c763df30fffa01b679951ec786f8ee2410454b9a2de0.json new file mode 100644 index 0000000000000..3c2761eccb0a5 --- /dev/null +++ b/rust/.sqlx/query-2b62adf40f8dd5758690c763df30fffa01b679951ec786f8ee2410454b9a2de0.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET queue_name = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Text", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "2b62adf40f8dd5758690c763df30fffa01b679951ec786f8ee2410454b9a2de0" +} diff --git a/rust/.sqlx/query-2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89.json b/rust/.sqlx/query-2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89.json new file mode 100644 index 0000000000000..cfcbdd6288f56 --- /dev/null +++ 
b/rust/.sqlx/query-2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET state = 'running', lock_id = $1, last_heartbeat=NOW() WHERE id = $2 returning queue_name", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "queue_name", + "type_info": "Text" + } + ], + "parameters": { + "Left": ["Uuid", "Uuid"] + }, + "nullable": [false] + }, + "hash": "2bd3251126625d8dd5143f58f4f9c4bbd0c3a17b7ea65767cf5e7512e5a6ea89" +} diff --git a/rust/.sqlx/query-2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7.json b/rust/.sqlx/query-2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7.json new file mode 100644 index 0000000000000..e69786b54b25e --- /dev/null +++ b/rust/.sqlx/query-2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET state = 'available', lock_id = NULL, queue_name = $1 WHERE id = $2", + "describe": { + "columns": [], + "parameters": { + "Left": ["Text", "Uuid"] + }, + "nullable": [] + }, + "hash": "2ca9ea5e8706bba21b14d9a349f3d0e39f01b19b243d724b09f3ce6617d03dc7" +} diff --git a/rust/.sqlx/query-2f6de0977357909dfd8d3d510c39a284f16421f77b77fe38e67143f28e270805.json b/rust/.sqlx/query-2f6de0977357909dfd8d3d510c39a284f16421f77b77fe38e67143f28e270805.json new file mode 100644 index 0000000000000..b0e1ef221041f --- /dev/null +++ b/rust/.sqlx/query-2f6de0977357909dfd8d3d510c39a284f16421f77b77fe38e67143f28e270805.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET priority = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Int2", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "2f6de0977357909dfd8d3d510c39a284f16421f77b77fe38e67143f28e270805" +} diff --git a/rust/.sqlx/query-350983ef271029734aff70eb7e298bfe578ecaa8678268863bce917ced9d5d46.json b/rust/.sqlx/query-350983ef271029734aff70eb7e298bfe578ecaa8678268863bce917ced9d5d46.json new file mode 100644 index 0000000000000..d3a54ba7ef247 --- /dev/null +++ b/rust/.sqlx/query-350983ef271029734aff70eb7e298bfe578ecaa8678268863bce917ced9d5d46.json @@ -0,0 +1,117 @@ +{ + "db_name": "PostgreSQL", + "query": "\nWITH available AS (\n SELECT\n id,\n state\n FROM cyclotron_jobs\n WHERE\n state = 'available'::JobState\n AND queue_name = $1\n AND scheduled <= NOW()\n ORDER BY\n priority ASC,\n scheduled ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET\n state = 'running'::JobState,\n lock_id = $3,\n last_heartbeat = NOW(),\n last_transition = NOW(),\n transition_count = transition_count + 1\nFROM available\nWHERE\n cyclotron_jobs.id = available.id\nRETURNING\n cyclotron_jobs.id,\n team_id,\n available.state as \"state: JobState\",\n queue_name,\n priority,\n function_id,\n created,\n last_transition,\n scheduled,\n transition_count,\n NULL as vm_state,\n metadata,\n parameters,\n lock_id,\n last_heartbeat,\n janitor_touch_count\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "state: JobState", + "type_info": { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": ["available", "completed", "failed", "running", "paused"] + } + } + } + }, + { + "ordinal": 3, + "name": "queue_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "priority", + "type_info": 
"Int2" + }, + { + "ordinal": 5, + "name": "function_id", + "type_info": "Uuid" + }, + { + "ordinal": 6, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "last_transition", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "scheduled", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "transition_count", + "type_info": "Int2" + }, + { + "ordinal": 10, + "name": "vm_state", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "metadata", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "parameters", + "type_info": "Text" + }, + { + "ordinal": 13, + "name": "lock_id", + "type_info": "Uuid" + }, + { + "ordinal": 14, + "name": "last_heartbeat", + "type_info": "Timestamptz" + }, + { + "ordinal": 15, + "name": "janitor_touch_count", + "type_info": "Int2" + } + ], + "parameters": { + "Left": ["Text", "Int8", "Uuid"] + }, + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + null, + true, + true, + true, + true, + false + ] + }, + "hash": "350983ef271029734aff70eb7e298bfe578ecaa8678268863bce917ced9d5d46" +} diff --git a/rust/.sqlx/query-385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025.json b/rust/.sqlx/query-385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025.json new file mode 100644 index 0000000000000..5c6b66d3f8739 --- /dev/null +++ b/rust/.sqlx/query-385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\nSELECT id FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 AND janitor_touch_count >= $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + } + ], + "parameters": { + "Left": ["Timestamptz", "Int2"] + }, + "nullable": [false] + }, + "hash": "385e94f4adab0f85174968f6eee873bf6d1d43884cd628df5b36978dd761b025" +} diff --git a/rust/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json b/rust/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json new file mode 100644 index 0000000000000..2ff58c66714a1 --- /dev/null +++ b/rust/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "\nWITH stalled AS (\n SELECT id FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET state = 'available', lock_id = NULL, last_heartbeat = NULL, janitor_touch_count = janitor_touch_count + 1\nFROM stalled\nWHERE cyclotron_jobs.id = stalled.id\n ", + "describe": { + "columns": [], + "parameters": { + "Left": ["Timestamptz"] + }, + "nullable": [] + }, + "hash": "54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d" +} diff --git a/rust/.sqlx/query-7217e766aeb53298238222c0c71a2ce446cac731845c53cb926fc47ace708dd6.json b/rust/.sqlx/query-7217e766aeb53298238222c0c71a2ce446cac731845c53cb926fc47ace708dd6.json new file mode 100644 index 0000000000000..230374e98d610 --- /dev/null +++ b/rust/.sqlx/query-7217e766aeb53298238222c0c71a2ce446cac731845c53cb926fc47ace708dd6.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\nINSERT INTO cyclotron_jobs\n (\n id,\n team_id,\n function_id,\n created,\n lock_id,\n last_heartbeat,\n janitor_touch_count,\n transition_count,\n last_transition,\n queue_name,\n state,\n scheduled,\n priority,\n vm_state,\n metadata,\n parameters\n )\nVALUES\n 
($1, $2, $3, NOW(), NULL, NULL, 0, 0, NOW(), $4, $5, $6, $7, $8, $9, $10)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Int4", + "Uuid", + "Text", + { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": ["available", "completed", "failed", "running", "paused"] + } + } + }, + "Timestamptz", + "Int2", + "Text", + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "7217e766aeb53298238222c0c71a2ce446cac731845c53cb926fc47ace708dd6" +} diff --git a/rust/.sqlx/query-78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8.json b/rust/.sqlx/query-78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8.json new file mode 100644 index 0000000000000..d70d4c9d33a43 --- /dev/null +++ b/rust/.sqlx/query-78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT COUNT(*) FROM cyclotron_jobs WHERE queue_name = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": ["Text"] + }, + "nullable": [null] + }, + "hash": "78f54fcebc11e2411008448281e4711bdfb8cf78e362ccda8bc14e92324d51f8" +} diff --git a/rust/.sqlx/query-884da9767d2992c7b279b4f8df5129b8c4d7020b7cb1999702aee1bfb1087efb.json b/rust/.sqlx/query-884da9767d2992c7b279b4f8df5129b8c4d7020b7cb1999702aee1bfb1087efb.json new file mode 100644 index 0000000000000..b728d398568c5 --- /dev/null +++ b/rust/.sqlx/query-884da9767d2992c7b279b4f8df5129b8c4d7020b7cb1999702aee1bfb1087efb.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET lock_id = NULL, last_heartbeat = NULL WHERE id = $1 AND lock_id = $2", + "describe": { + "columns": [], + "parameters": { + "Left": ["Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "884da9767d2992c7b279b4f8df5129b8c4d7020b7cb1999702aee1bfb1087efb" +} diff --git a/rust/.sqlx/query-8ab11a89bc4720985e130c58021f46045c332cc45e69b08708b289cc933b3b5c.json b/rust/.sqlx/query-8ab11a89bc4720985e130c58021f46045c332cc45e69b08708b289cc933b3b5c.json new file mode 100644 index 0000000000000..8c3a3dbde8b62 --- /dev/null +++ b/rust/.sqlx/query-8ab11a89bc4720985e130c58021f46045c332cc45e69b08708b289cc933b3b5c.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET metadata = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Text", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "8ab11a89bc4720985e130c58021f46045c332cc45e69b08708b289cc933b3b5c" +} diff --git a/rust/.sqlx/query-98da1f12285a97a47ce88535c82a8c4eb4cb910b0c5ddbc6bdbd9156af7b4e23.json b/rust/.sqlx/query-98da1f12285a97a47ce88535c82a8c4eb4cb910b0c5ddbc6bdbd9156af7b4e23.json new file mode 100644 index 0000000000000..59a56c441cb7c --- /dev/null +++ b/rust/.sqlx/query-98da1f12285a97a47ce88535c82a8c4eb4cb910b0c5ddbc6bdbd9156af7b4e23.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET scheduled = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Timestamptz", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "98da1f12285a97a47ce88535c82a8c4eb4cb910b0c5ddbc6bdbd9156af7b4e23" +} diff --git a/rust/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json b/rust/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json new file mode 100644 index 0000000000000..bd8a7cdd90282 --- /dev/null +++ 
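Editor's note on the `.sqlx` files in this section: they are offline query metadata for cyclotron, and the interesting one is the batched dequeue, which claims available jobs with `FOR UPDATE SKIP LOCKED` and flips them to `running` in one statement so concurrent workers never grab the same row. A rough Python/psycopg2 sketch of that claim pattern follows (kept in Python like the rest of this PR's scripts; the DSN is a placeholder, and this is not how the Rust `cyclotron-core` crate actually issues the query).

import uuid

import psycopg2


def dequeue_jobs(dsn: str, queue: str, limit: int, worker_lock_id: uuid.UUID) -> list:
    """Claim up to `limit` available jobs for one worker; returns the claimed job ids."""
    with psycopg2.connect(dsn) as conn, conn.cursor() as cur:
        cur.execute(
            """
            WITH available AS (
                SELECT id FROM cyclotron_jobs
                WHERE state = 'available' AND queue_name = %s AND scheduled <= NOW()
                ORDER BY priority ASC, scheduled ASC
                LIMIT %s
                FOR UPDATE SKIP LOCKED
            )
            UPDATE cyclotron_jobs
            SET state = 'running', lock_id = %s, last_heartbeat = NOW()
            FROM available
            WHERE cyclotron_jobs.id = available.id
            RETURNING cyclotron_jobs.id
            """,
            (queue, limit, str(worker_lock_id)),
        )
        return [row[0] for row in cur.fetchall()]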
b/rust/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT vm_state FROM cyclotron_jobs WHERE id = $1 AND lock_id = $2", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "vm_state", + "type_info": "Text" + } + ], + "parameters": { + "Left": ["Uuid", "Uuid"] + }, + "nullable": [true] + }, + "hash": "aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632" +} diff --git a/rust/.sqlx/query-b160b785a0377b854341105e99e4ef7a5da523e168a5f9be47f6caaef09487d7.json b/rust/.sqlx/query-b160b785a0377b854341105e99e4ef7a5da523e168a5f9be47f6caaef09487d7.json new file mode 100644 index 0000000000000..ea9c7f8fceb06 --- /dev/null +++ b/rust/.sqlx/query-b160b785a0377b854341105e99e4ef7a5da523e168a5f9be47f6caaef09487d7.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET parameters = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Text", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "b160b785a0377b854341105e99e4ef7a5da523e168a5f9be47f6caaef09487d7" +} diff --git a/rust/.sqlx/query-b3239c1dde9a88769ec488299612f7a3ad2b2ee57d8d2353c858299d7b6ffe13.json b/rust/.sqlx/query-b3239c1dde9a88769ec488299612f7a3ad2b2ee57d8d2353c858299d7b6ffe13.json new file mode 100644 index 0000000000000..d2942f91b1930 --- /dev/null +++ b/rust/.sqlx/query-b3239c1dde9a88769ec488299612f7a3ad2b2ee57d8d2353c858299d7b6ffe13.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs\n SET state = $1, last_transition = NOW(), transition_count = transition_count + 1\n WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": [ + { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": ["available", "completed", "failed", "running", "paused"] + } + } + }, + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "b3239c1dde9a88769ec488299612f7a3ad2b2ee57d8d2353c858299d7b6ffe13" +} diff --git a/rust/.sqlx/query-b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b.json b/rust/.sqlx/query-b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b.json new file mode 100644 index 0000000000000..8f201d80503ce --- /dev/null +++ b/rust/.sqlx/query-b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO cyclotron_dead_letter_metadata (job_id, original_queue_name, reason, dlq_time) VALUES ($1, $2, $3, NOW())", + "describe": { + "columns": [], + "parameters": { + "Left": ["Uuid", "Text", "Text"] + }, + "nullable": [] + }, + "hash": "b8c1b723826d595dca0389d729fa76bd8a7d96d73983a0c408f32f17da5f483b" +} diff --git a/rust/.sqlx/query-c624261597b9356ff3e7c3e392a84bb0b551e91c503e8b21c29814f1eb660a8e.json b/rust/.sqlx/query-c624261597b9356ff3e7c3e392a84bb0b551e91c503e8b21c29814f1eb660a8e.json new file mode 100644 index 0000000000000..b94965873e7d6 --- /dev/null +++ b/rust/.sqlx/query-c624261597b9356ff3e7c3e392a84bb0b551e91c503e8b21c29814f1eb660a8e.json @@ -0,0 +1,117 @@ +{ + "db_name": "PostgreSQL", + "query": "\nWITH available AS (\n SELECT\n id,\n state\n FROM cyclotron_jobs\n WHERE\n state = 'available'::JobState\n AND queue_name = $1\n AND scheduled <= NOW()\n ORDER BY\n priority ASC,\n scheduled ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET\n state = 'running'::JobState,\n lock_id = $3,\n last_heartbeat = NOW(),\n last_transition = NOW(),\n transition_count = 
transition_count + 1\nFROM available\nWHERE\n cyclotron_jobs.id = available.id\nRETURNING\n cyclotron_jobs.id,\n team_id,\n available.state as \"state: JobState\",\n queue_name,\n priority,\n function_id,\n created,\n last_transition,\n scheduled,\n transition_count,\n vm_state,\n metadata,\n parameters,\n lock_id,\n last_heartbeat,\n janitor_touch_count\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "state: JobState", + "type_info": { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": ["available", "completed", "failed", "running", "paused"] + } + } + } + }, + { + "ordinal": 3, + "name": "queue_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "priority", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "function_id", + "type_info": "Uuid" + }, + { + "ordinal": 6, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "last_transition", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "scheduled", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "transition_count", + "type_info": "Int2" + }, + { + "ordinal": 10, + "name": "vm_state", + "type_info": "Text" + }, + { + "ordinal": 11, + "name": "metadata", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "parameters", + "type_info": "Text" + }, + { + "ordinal": 13, + "name": "lock_id", + "type_info": "Uuid" + }, + { + "ordinal": 14, + "name": "last_heartbeat", + "type_info": "Timestamptz" + }, + { + "ordinal": 15, + "name": "janitor_touch_count", + "type_info": "Int2" + } + ], + "parameters": { + "Left": ["Text", "Int8", "Uuid"] + }, + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + true, + true, + true, + true, + true, + false + ] + }, + "hash": "c624261597b9356ff3e7c3e392a84bb0b551e91c503e8b21c29814f1eb660a8e" +} diff --git a/rust/.sqlx/query-f4e808f58dd290c6e2b49b63e9e0eb022936ba318021512a0cc0c2e0766abe7c.json b/rust/.sqlx/query-f4e808f58dd290c6e2b49b63e9e0eb022936ba318021512a0cc0c2e0766abe7c.json new file mode 100644 index 0000000000000..a585e9f7e7d73 --- /dev/null +++ b/rust/.sqlx/query-f4e808f58dd290c6e2b49b63e9e0eb022936ba318021512a0cc0c2e0766abe7c.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM cyclotron_jobs WHERE state = 'completed'", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "f4e808f58dd290c6e2b49b63e9e0eb022936ba318021512a0cc0c2e0766abe7c" +} diff --git a/rust/.sqlx/query-ffb66bdedf6506f95b9293ef88b0c51e2f5fb7d3271e1287165d2a35b6aaa25e.json b/rust/.sqlx/query-ffb66bdedf6506f95b9293ef88b0c51e2f5fb7d3271e1287165d2a35b6aaa25e.json new file mode 100644 index 0000000000000..605d79d57c098 --- /dev/null +++ b/rust/.sqlx/query-ffb66bdedf6506f95b9293ef88b0c51e2f5fb7d3271e1287165d2a35b6aaa25e.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM cyclotron_jobs WHERE state = 'failed'", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "ffb66bdedf6506f95b9293ef88b0c51e2f5fb7d3271e1287165d2a35b6aaa25e" +} diff --git a/rust/Cargo.lock b/rust/Cargo.lock index 923fbc2928641..6c15916314724 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -665,14 +665,15 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.33" 
+version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", + "serde", "wasm-bindgen", "windows-targets 0.52.0", ] @@ -700,6 +701,25 @@ dependencies = [ "tokio-util", ] +[[package]] +name = "common-dns" +version = "0.1.0" +dependencies = [ + "futures", + "reqwest 0.12.3", + "tokio", +] + +[[package]] +name = "common-metrics" +version = "0.1.0" +dependencies = [ + "axum 0.7.5", + "metrics", + "metrics-exporter-prometheus", + "tokio", +] + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -819,6 +839,78 @@ dependencies = [ "typenum", ] +[[package]] +name = "cyclotron-core" +version = "0.1.0" +dependencies = [ + "chrono", + "futures", + "rand", + "serde", + "sqlx", + "thiserror", + "tokio", + "uuid", +] + +[[package]] +name = "cyclotron-fetch" +version = "0.1.0" +dependencies = [ + "axum 0.7.5", + "chrono", + "common-dns", + "common-metrics", + "cyclotron-core", + "envconfig", + "futures", + "health", + "http 1.1.0", + "httpmock", + "rand", + "reqwest 0.12.3", + "serde", + "serde_json", + "sqlx", + "thiserror", + "tokio", + "tracing", + "tracing-subscriber", + "uuid", +] + +[[package]] +name = "cyclotron-janitor" +version = "0.1.0" +dependencies = [ + "axum 0.7.5", + "chrono", + "common-metrics", + "cyclotron-core", + "envconfig", + "eyre", + "health", + "sqlx", + "tokio", + "tracing", + "tracing-subscriber", + "uuid", +] + +[[package]] +name = "cyclotron-node" +version = "0.1.0" +dependencies = [ + "chrono", + "cyclotron-core", + "neon", + "once_cell", + "serde", + "serde_json", + "tokio", + "uuid", +] + [[package]] name = "dashmap" version = "5.5.3" @@ -1040,6 +1132,7 @@ dependencies = [ "axum-client-ip", "bytes", "envconfig", + "maxminddb", "once_cell", "rand", "redis", @@ -1468,6 +1561,7 @@ name = "hook-api" version = "0.1.0" dependencies = [ "axum 0.7.5", + "common-metrics", "envconfig", "eyre", "hook-common", @@ -1489,13 +1583,10 @@ name = "hook-common" version = "0.1.0" dependencies = [ "async-trait", - "axum 0.7.5", "chrono", "envconfig", "health", "http 1.1.0", - "metrics", - "metrics-exporter-prometheus", "rdkafka", "reqwest 0.12.3", "serde", @@ -1514,6 +1605,7 @@ version = "0.1.0" dependencies = [ "async-trait", "axum 0.7.5", + "common-metrics", "envconfig", "eyre", "futures", @@ -1537,6 +1629,8 @@ version = "0.1.0" dependencies = [ "axum 0.7.5", "chrono", + "common-dns", + "common-metrics", "envconfig", "futures", "health", @@ -1944,6 +2038,16 @@ version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +[[package]] +name = "libloading" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" +dependencies = [ + "cfg-if", + "windows-targets 0.52.0", +] + [[package]] name = "libm" version = "0.2.8" @@ -2038,6 +2142,16 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +[[package]] +name = "maxminddb" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d13fa57adcc4f3aca91e511b3cdaa58ed8cbcbf97f20e342a11218c76e127f51" +dependencies = [ + "log", + "serde", 
+] + [[package]] name = "md-5" version = "0.10.6" @@ -2160,6 +2274,32 @@ dependencies = [ "tempfile", ] +[[package]] +name = "neon" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d75440242411c87dc39847b0e33e961ec1f10326a9d8ecf9c1ea64a3b3c13dc" +dependencies = [ + "getrandom", + "libloading", + "neon-macros", + "once_cell", + "semver", + "send_wrapper", + "smallvec", +] + +[[package]] +name = "neon-macros" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6813fde79b646e47e7ad75f480aa80ef76a5d9599e2717407961531169ee38b" +dependencies = [ + "quote", + "syn 2.0.48", + "syn-mid", +] + [[package]] name = "new_debug_unreachable" version = "1.0.6" @@ -2676,6 +2816,28 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "property-defs-rs" +version = "0.1.0" +dependencies = [ + "axum 0.7.5", + "chrono", + "envconfig", + "futures", + "health", + "metrics", + "rdkafka", + "serde", + "serde_json", + "serve-metrics", + "sqlx", + "time", + "tokio", + "tracing", + "tracing-subscriber", + "uuid", +] + [[package]] name = "prost" version = "0.12.4" @@ -3181,6 +3343,18 @@ dependencies = [ "libc", ] +[[package]] +name = "semver" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" + +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + [[package]] name = "serde" version = "1.0.196" @@ -3257,6 +3431,16 @@ dependencies = [ "serde", ] +[[package]] +name = "serve-metrics" +version = "0.1.0" +dependencies = [ + "axum 0.7.5", + "metrics", + "metrics-exporter-prometheus", + "tokio", +] + [[package]] name = "sha1" version = "0.10.6" @@ -3660,6 +3844,17 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "syn-mid" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5dc35bb08dd1ca3dfb09dce91fd2d13294d6711c88897d9a9d60acf39bce049" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "sync_wrapper" version = "0.1.2" diff --git a/rust/Cargo.toml b/rust/Cargo.toml index eae4b047f3dbf..347530f99d1bd 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -2,13 +2,20 @@ resolver = "2" members = [ + "property-defs-rs", "capture", "common/health", + "common/metrics", + "common/dns", "feature-flags", "hook-api", "hook-common", "hook-janitor", "hook-worker", + "cyclotron-core", + "cyclotron-node", + "cyclotron-janitor", + "cyclotron-fetch", ] [workspace.lints.rust] @@ -34,7 +41,7 @@ axum = { version = "0.7.5", features = ["http2", "macros", "matched-path"] } axum-client-ip = "0.6.0" base64 = "0.22.0" bytes = "1" -chrono = { version = "0.4" } +chrono = { version = "0.4.38", features = ["default", "serde"] } envconfig = "0.10.0" eyre = "0.6.9" flate2 = "1.0" @@ -80,3 +87,4 @@ tracing-opentelemetry = "0.23.0" tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } url = { version = "2.5.0 " } uuid = { version = "1.6.1", features = ["v7", "serde"] } +neon = "1" \ No newline at end of file diff --git a/rust/Dockerfile b/rust/Dockerfile index a6c59b11a0e33..a19bd3a74ae0d 100644 --- a/rust/Dockerfile +++ b/rust/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/lukemathwalker/cargo-chef:latest-rust-1.77-bookworm AS chef +FROM 
docker.io/lukemathwalker/cargo-chef:latest-rust-1.80.1-bookworm AS chef ARG BIN WORKDIR /app diff --git a/rust/Dockerfile.migrate b/rust/Dockerfile.migrate-hooks similarity index 87% rename from rust/Dockerfile.migrate rename to rust/Dockerfile.migrate-hooks index e7fc120360b79..338c9b2368e22 100644 --- a/rust/Dockerfile.migrate +++ b/rust/Dockerfile.migrate-hooks @@ -1,4 +1,4 @@ -FROM docker.io/library/rust:1.74.0-buster as builder +FROM docker.io/library/rust:1.80.1-bullseye as builder RUN apt update && apt install build-essential cmake -y RUN cargo install sqlx-cli@0.7.3 --no-default-features --features native-tls,postgres --root /app/target/release/ diff --git a/rust/capture/src/api.rs b/rust/capture/src/api.rs index 646bd2d3c9c8b..c7441d97f5c42 100644 --- a/rust/capture/src/api.rs +++ b/rust/capture/src/api.rs @@ -15,6 +15,9 @@ pub enum CaptureResponseCode { #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] pub struct CaptureResponse { pub status: CaptureResponseCode, + + #[serde(skip_serializing_if = "Option::is_none")] + pub quota_limited: Option>, } #[derive(Error, Debug)] @@ -32,6 +35,14 @@ pub enum CaptureError { EmptyDistinctId, #[error("event submitted without a distinct_id")] MissingDistinctId, + #[error("replay event submitted without snapshot data")] + MissingSnapshotData, + #[error("replay event submitted without session id")] + MissingSessionId, + #[error("replay event submitted without window id")] + MissingWindowId, + #[error("replay event has invalid session id")] + InvalidSessionId, #[error("event submitted without an api_key")] NoTokenError, @@ -64,7 +75,11 @@ impl IntoResponse for CaptureError { | CaptureError::EmptyDistinctId | CaptureError::MissingDistinctId | CaptureError::EventTooBig - | CaptureError::NonRetryableSinkError => (StatusCode::BAD_REQUEST, self.to_string()), + | CaptureError::NonRetryableSinkError + | CaptureError::MissingSessionId + | CaptureError::MissingWindowId + | CaptureError::InvalidSessionId + | CaptureError::MissingSnapshotData => (StatusCode::BAD_REQUEST, self.to_string()), CaptureError::NoTokenError | CaptureError::MultipleTokensError @@ -87,6 +102,7 @@ pub enum DataType { ClientIngestionWarning, HeatmapMain, ExceptionMain, + SnapshotMain, } #[derive(Clone, Debug, Serialize, Eq, PartialEq)] pub struct ProcessedEvent { @@ -103,6 +119,8 @@ pub struct ProcessedEvent { )] pub sent_at: Option, pub token: String, + #[serde(skip_serializing)] + pub session_id: Option, } impl ProcessedEvent { diff --git a/rust/capture/src/config.rs b/rust/capture/src/config.rs index 6c66d09e68454..c6ca77461d11c 100644 --- a/rust/capture/src/config.rs +++ b/rust/capture/src/config.rs @@ -2,6 +2,24 @@ use std::{net::SocketAddr, num::NonZeroU32}; use envconfig::Envconfig; +#[derive(Debug, PartialEq, Clone)] +pub enum CaptureMode { + Events, + Recordings, +} + +impl std::str::FromStr for CaptureMode { + type Err = String; + + fn from_str(s: &str) -> Result { + match s.trim().to_lowercase().as_ref() { + "events" => Ok(CaptureMode::Events), + "recordings" => Ok(CaptureMode::Recordings), + _ => Err(format!("Unknown Capture Type: {s}")), + } + } +} + #[derive(Envconfig, Clone)] pub struct Config { #[envconfig(default = "false")] @@ -37,6 +55,9 @@ pub struct Config { #[envconfig(default = "true")] pub export_prometheus: bool, pub redis_key_prefix: Option, + + #[envconfig(default = "events")] + pub capture_mode: CaptureMode, } #[derive(Envconfig, Clone)] @@ -47,6 +68,8 @@ pub struct KafkaConfig { pub kafka_producer_queue_mib: u32, // Size of the in-memory producer 
queue in mebibytes #[envconfig(default = "20000")] pub kafka_message_timeout_ms: u32, // Time before we stop retrying producing a message: 20 seconds + #[envconfig(default = "1000000")] + pub kafka_producer_message_max_bytes: u32, // message.max.bytes - max kafka message size we will produce #[envconfig(default = "none")] pub kafka_compression_codec: String, // none, gzip, snappy, lz4, zstd pub kafka_hosts: String, diff --git a/rust/capture/src/router.rs b/rust/capture/src/router.rs index c4d91bf202c13..07577f37fcdbc 100644 --- a/rust/capture/src/router.rs +++ b/rust/capture/src/router.rs @@ -15,10 +15,12 @@ use crate::{ limiters::billing::BillingLimiter, redis::Client, sinks, time::TimeSource, v0_endpoint, }; +use crate::config::CaptureMode; use crate::prometheus::{setup_metrics_recorder, track_metrics}; const EVENT_BODY_SIZE: usize = 2 * 1024 * 1024; // 2MB const BATCH_BODY_SIZE: usize = 20 * 1024 * 1024; // 20MB, up from the default 2MB used for normal event payloads +const RECORDING_BODY_SIZE: usize = 20 * 1024 * 1024; // 20MB, up from the default 2MB used for normal event payloads #[derive(Clone)] pub struct State { @@ -43,6 +45,7 @@ pub fn router< redis: Arc, billing: BillingLimiter, metrics: bool, + capture_mode: CaptureMode, ) -> Router { let state = State { sink: Arc::new(sink), @@ -106,14 +109,30 @@ pub fn router< .route("/_readiness", get(index)) .route("/_liveness", get(move || ready(liveness.get_status()))); - let router = Router::new() - .merge(batch_router) - .merge(event_router) - .merge(status_router) - .layer(TraceLayer::new_for_http()) - .layer(cors) - .layer(axum::middleware::from_fn(track_metrics)) - .with_state(state); + let recordings_router = Router::new() + .route( + "/s", + post(v0_endpoint::recording) + .get(v0_endpoint::recording) + .options(v0_endpoint::options), + ) + .route( + "/s/", + post(v0_endpoint::recording) + .get(v0_endpoint::recording) + .options(v0_endpoint::options), + ) + .layer(DefaultBodyLimit::max(RECORDING_BODY_SIZE)); + + let router = match capture_mode { + CaptureMode::Events => Router::new().merge(batch_router).merge(event_router), + CaptureMode::Recordings => Router::new().merge(recordings_router), + } + .merge(status_router) + .layer(TraceLayer::new_for_http()) + .layer(cors) + .layer(axum::middleware::from_fn(track_metrics)) + .with_state(state); // Don't install metrics unless asked to // Installing a global recorder when capture is used as a library (during tests etc) diff --git a/rust/capture/src/server.rs b/rust/capture/src/server.rs index 5b8cf06c86842..18cf0205d64a8 100644 --- a/rust/capture/src/server.rs +++ b/rust/capture/src/server.rs @@ -46,6 +46,7 @@ where redis_client, billing, config.export_prometheus, + config.capture_mode, ) } else { let sink_liveness = liveness @@ -86,6 +87,7 @@ where redis_client, billing, config.export_prometheus, + config.capture_mode, ) }; diff --git a/rust/capture/src/sinks/kafka.rs b/rust/capture/src/sinks/kafka.rs index 760c6f31740ba..498b2b71e837a 100644 --- a/rust/capture/src/sinks/kafka.rs +++ b/rust/capture/src/sinks/kafka.rs @@ -4,6 +4,7 @@ use async_trait::async_trait; use health::HealthHandle; use metrics::{counter, gauge, histogram}; use rdkafka::error::{KafkaError, RDKafkaErrorCode}; +use rdkafka::message::{Header, OwnedHeaders}; use rdkafka::producer::{DeliveryFuture, FutureProducer, FutureRecord, Producer}; use rdkafka::util::Timeout; use rdkafka::ClientConfig; @@ -129,6 +130,10 @@ impl KafkaSink { .set("statistics.interval.ms", "10000") .set("partitioner", "murmur2_random") // 
Compatibility with python-kafka .set("linger.ms", config.kafka_producer_linger_ms.to_string()) + .set( + "message.max.bytes", + config.kafka_producer_message_max_bytes.to_string(), + ) .set( "message.timeout.ms", config.kafka_message_timeout_ms.to_string(), @@ -179,6 +184,8 @@ impl KafkaSink { })?; let event_key = event.key(); + let session_id = event.session_id.as_deref(); + let (topic, partition_key): (&str, Option<&str>) = match &event.data_type { DataType::AnalyticsHistorical => (&self.historical_topic, Some(event_key.as_str())), // We never trigger overflow on historical events DataType::AnalyticsMain => { @@ -199,6 +206,10 @@ impl KafkaSink { ), DataType::HeatmapMain => (&self.heatmaps_topic, Some(event_key.as_str())), DataType::ExceptionMain => (&self.exceptions_topic, Some(event_key.as_str())), + DataType::SnapshotMain => ( + &self.main_topic, + Some(session_id.ok_or(CaptureError::MissingSessionId)?), + ), }; match self.producer.send_result(FutureRecord { @@ -207,7 +218,10 @@ impl KafkaSink { partition: None, key: partition_key, timestamp: None, - headers: None, + headers: Some(OwnedHeaders::new().insert(Header { + key: "token", + value: Some(&event.token), + })), }) { Ok(ack) => Ok(ack), Err((e, _)) => match e.rdkafka_error_code() { @@ -318,7 +332,9 @@ mod tests { use std::num::NonZeroU32; use time::Duration; - async fn start_on_mocked_sink() -> (MockCluster<'static, DefaultProducerContext>, KafkaSink) { + async fn start_on_mocked_sink( + message_max_bytes: Option, + ) -> (MockCluster<'static, DefaultProducerContext>, KafkaSink) { let registry = HealthRegistry::new("liveness"); let handle = registry .register("one".to_string(), Duration::seconds(30)) @@ -333,6 +349,7 @@ mod tests { kafka_producer_linger_ms: 0, kafka_producer_queue_mib: 50, kafka_message_timeout_ms: 500, + kafka_producer_message_max_bytes: message_max_bytes.unwrap_or(1000000), kafka_compression_codec: "none".to_string(), kafka_hosts: cluster.bootstrap_servers(), kafka_topic: "events_plugin_ingestion".to_string(), @@ -351,7 +368,7 @@ mod tests { // Uses a mocked Kafka broker that allows injecting write errors, to check error handling. // We test different cases in a single test to amortize the startup cost of the producer. 
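
The mocked-sink tests that continue below exercise the new size limit end to end. As a minimal standalone sketch of what kafka_producer_message_max_bytes controls, a producer can be configured as follows; the helper name and its arguments are illustrative, only the message.max.bytes and partitioner keys mirror the sink configuration above, and the 1,000,000-byte default chosen for the config matches rdkafka's own built-in default:

```rust
use rdkafka::config::ClientConfig;
use rdkafka::producer::FutureProducer;

// Illustrative helper: `hosts` plays the role of kafka_hosts and `max_bytes` the role of
// kafka_producer_message_max_bytes from the capture KafkaConfig.
fn build_producer(hosts: &str, max_bytes: u32) -> rdkafka::error::KafkaResult<FutureProducer> {
    ClientConfig::new()
        .set("bootstrap.servers", hosts)
        .set("partitioner", "murmur2_random") // python-kafka compatible partitioning, as in KafkaSink
        .set("message.max.bytes", max_bytes.to_string()) // payloads above this are rejected by the producer
        .create()
}
```

Messages over the configured limit fail at the producer with a message-size error, which the sink already maps to CaptureError::EventTooBig; that is exactly what the 4 MB case in the updated test asserts once the limit is raised to 3 MB.
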
- let (cluster, sink) = start_on_mocked_sink().await; + let (cluster, sink) = start_on_mocked_sink(Some(3000000)).await; let event: ProcessedEvent = ProcessedEvent { data_type: DataType::AnalyticsMain, uuid: uuid_v7(), @@ -361,6 +378,7 @@ mod tests { now: "".to_string(), sent_at: None, token: "token1".to_string(), + session_id: None, }; // Wait for producer to be healthy, to keep kafka_message_timeout_ms short and tests faster @@ -378,7 +396,7 @@ mod tests { .await .expect("failed to send initial event batch"); - // Producer should reject a 2MB message, twice the default `message.max.bytes` + // Producer should accept a 2MB message as we set message.max.bytes to 3MB let big_data = rand::thread_rng() .sample_iter(Alphanumeric) .take(2_000_000) @@ -393,6 +411,28 @@ mod tests { now: "".to_string(), sent_at: None, token: "token1".to_string(), + session_id: None, + }; + sink.send(big_event) + .await + .expect("failed to send event larger than default max size"); + + // Producer should reject a 4MB message + let big_data = rand::thread_rng() + .sample_iter(Alphanumeric) + .take(4_000_000) + .map(char::from) + .collect(); + let big_event: ProcessedEvent = ProcessedEvent { + data_type: DataType::AnalyticsMain, + uuid: uuid_v7(), + distinct_id: "id1".to_string(), + ip: "".to_string(), + data: big_data, + now: "".to_string(), + sent_at: None, + token: "token1".to_string(), + session_id: None, }; match sink.send(big_event).await { Err(CaptureError::EventTooBig) => {} // Expected diff --git a/rust/capture/src/v0_endpoint.rs b/rust/capture/src/v0_endpoint.rs index 83290e1016742..a935ab8189c19 100644 --- a/rust/capture/src/v0_endpoint.rs +++ b/rust/capture/src/v0_endpoint.rs @@ -9,6 +9,8 @@ use axum::http::{HeaderMap, Method}; use axum_client_ip::InsecureClientIp; use base64::Engine; use metrics::counter; +use serde_json::json; +use serde_json::Value; use tracing::instrument; use crate::limiters::billing::QuotaResource; @@ -27,31 +29,17 @@ use crate::{ /// /// Because it must accommodate several shapes, it is inefficient in places. A v1 /// endpoint should be created, that only accepts the BatchedRequest payload shape. - -#[instrument( - skip_all, - fields( - path, - token, - batch_size, - user_agent, - content_encoding, - content_type, - version, - compression, - historical_migration - ) -)] -#[debug_handler] -pub async fn event( - state: State, - InsecureClientIp(ip): InsecureClientIp, - meta: Query, - headers: HeaderMap, - method: Method, - path: MatchedPath, +#[allow(clippy::too_many_arguments)] +async fn handle_common( + state: &State, + InsecureClientIp(ip): &InsecureClientIp, + meta: &EventQuery, + headers: &HeaderMap, + method: &Method, + path: &MatchedPath, + quota_resource: QuotaResource, body: Bytes, -) -> Result, CaptureError> { +) -> Result<(ProcessingContext, Vec), CaptureError> { let user_agent = headers .get("user-agent") .map_or("unknown", |v| v.to_str().unwrap_or("unknown")); @@ -130,46 +118,157 @@ pub async fn event( let billing_limited = state .billing - .is_limited(context.token.as_str(), QuotaResource::Events) + .is_limited(context.token.as_str(), quota_resource) .await; if billing_limited { report_dropped_events("over_quota", events.len() as u64); - // for v0 we want to just return ok 🙃 - // this is because the clients are pretty dumb and will just retry over and over and - // over... 
- // - // for v1, we'll return a meaningful error code and error, so that the clients can do - // something meaningful with that error - return Ok(Json(CaptureResponse { - status: CaptureResponseCode::Ok, - })); + return Err(CaptureError::BillingLimit); } tracing::debug!(context=?context, events=?events, "decoded request"); - if let Err(err) = process_events(state.sink.clone(), &events, &context).await { - let cause = match err { - // TODO: automate this with a macro - CaptureError::EmptyDistinctId => "empty_distinct_id", - CaptureError::MissingDistinctId => "missing_distinct_id", - CaptureError::MissingEventName => "missing_event_name", - _ => "process_events_error", - }; - report_dropped_events(cause, events.len() as u64); - tracing::log::warn!("rejected invalid payload: {}", err); - return Err(err); + Ok((context, events)) +} + +#[instrument( + skip_all, + fields( + path, + token, + batch_size, + user_agent, + content_encoding, + content_type, + version, + compression, + historical_migration + ) +)] +#[debug_handler] +pub async fn event( + state: State, + ip: InsecureClientIp, + meta: Query, + headers: HeaderMap, + method: Method, + path: MatchedPath, + body: Bytes, +) -> Result, CaptureError> { + match handle_common( + &state, + &ip, + &meta, + &headers, + &method, + &path, + QuotaResource::Events, + body, + ) + .await + { + Err(CaptureError::BillingLimit) => { + // for v0 we want to just return ok 🙃 + // this is because the clients are pretty dumb and will just retry over and over and + // over... + // + // for v1, we'll return a meaningful error code and error, so that the clients can do + // something meaningful with that error + Ok(Json(CaptureResponse { + status: CaptureResponseCode::Ok, + quota_limited: None, + })) + } + Err(err) => Err(err), + Ok((context, events)) => { + if let Err(err) = process_events(state.sink.clone(), &events, &context).await { + let cause = match err { + CaptureError::EmptyDistinctId => "empty_distinct_id", + CaptureError::MissingDistinctId => "missing_distinct_id", + CaptureError::MissingEventName => "missing_event_name", + _ => "process_events_error", + }; + report_dropped_events(cause, events.len() as u64); + tracing::log::warn!("rejected invalid payload: {}", err); + return Err(err); + } + + Ok(Json(CaptureResponse { + status: CaptureResponseCode::Ok, + quota_limited: None, + })) + } } +} - Ok(Json(CaptureResponse { - status: CaptureResponseCode::Ok, - })) +#[instrument( + skip_all, + fields( + path, + token, + batch_size, + user_agent, + content_encoding, + content_type, + version, + compression, + historical_migration + ) +)] +#[debug_handler] +pub async fn recording( + state: State, + ip: InsecureClientIp, + meta: Query, + headers: HeaderMap, + method: Method, + path: MatchedPath, + body: Bytes, +) -> Result, CaptureError> { + match handle_common( + &state, + &ip, + &meta, + &headers, + &method, + &path, + QuotaResource::Recordings, + body, + ) + .await + { + Err(CaptureError::BillingLimit) => Ok(Json(CaptureResponse { + status: CaptureResponseCode::Ok, + quota_limited: Some(vec!["recordings".to_string()]), + })), + Err(err) => Err(err), + Ok((context, events)) => { + if let Err(err) = process_replay_events(state.sink.clone(), &events, &context).await { + let cause = match err { + CaptureError::EmptyDistinctId => "empty_distinct_id", + CaptureError::MissingDistinctId => "missing_distinct_id", + CaptureError::MissingSessionId => "missing_event_name", + CaptureError::MissingWindowId => "missing_event_name", + CaptureError::MissingEventName => 
"missing_event_name", + _ => "process_events_error", + }; + report_dropped_events(cause, events.len() as u64); + tracing::log::warn!("rejected invalid payload: {}", err); + return Err(err); + } + Ok(Json(CaptureResponse { + status: CaptureResponseCode::Ok, + quota_limited: None, + })) + } + } } pub async fn options() -> Result, CaptureError> { Ok(Json(CaptureResponse { status: CaptureResponseCode::Ok, + quota_limited: None, })) } @@ -204,6 +303,7 @@ pub fn process_single_event( now: context.now.clone(), sent_at: context.sent_at, token: context.token.clone(), + session_id: None, }) } @@ -226,3 +326,55 @@ pub async fn process_events<'a>( sink.send_batch(events).await } } + +#[instrument(skip_all, fields(events = events.len()))] +pub async fn process_replay_events<'a>( + sink: Arc, + events: &'a [RawEvent], + context: &'a ProcessingContext, +) -> Result<(), CaptureError> { + let snapshot_items: Vec = events + .iter() + .map(|e| match e.properties.get("$snapshot_data") { + Some(Value::Array(value)) => Ok(value.to_vec()), + Some(Value::Object(value)) => Ok([Value::Object(value.clone())].to_vec()), + _ => Err(CaptureError::MissingSnapshotData), + }) + .collect::>, CaptureError>>()? + .into_iter() + .flatten() + .collect(); + + let session_id = events[0] + .properties + .get("$session_id") + .ok_or(CaptureError::MissingSessionId)? + .as_str() + .ok_or(CaptureError::InvalidSessionId)?; + let window_id = events[0] + .properties + .get("$window_id") + .ok_or(CaptureError::MissingWindowId)?; + let event = ProcessedEvent { + data_type: DataType::SnapshotMain, + uuid: events[0].uuid.unwrap_or_else(uuid_v7), + distinct_id: events[0].extract_distinct_id()?, + ip: context.client_ip.clone(), + data: json!({ + "event": "$snapshot_items", + "properties": { + "distinct_id": events[0].extract_distinct_id()?, + "$session_id": session_id, + "$window_id": window_id, + "$snapshot_source": events[0].properties.get("$snapshot_source").unwrap_or(&Value::String(String::from("web"))), + "$snapshot_items": snapshot_items, + } + }).to_string(), + now: context.now.clone(), + sent_at: context.sent_at, + token: context.token.clone(), + session_id: Some(session_id.to_string()), + }; + + sink.send(event).await +} diff --git a/rust/capture/src/v0_request.rs b/rust/capture/src/v0_request.rs index c0d5f36d3577f..ae0c80fece453 100644 --- a/rust/capture/src/v0_request.rs +++ b/rust/capture/src/v0_request.rs @@ -3,7 +3,7 @@ use std::io::prelude::*; use bytes::{Buf, Bytes}; use flate2::read::GzDecoder; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize}; use serde_json::Value; use time::format_description::well_known::Iso8601; use time::OffsetDateTime; @@ -56,6 +56,20 @@ pub struct EventFormData { pub data: String, } +pub fn empty_string_is_none<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let opt = Option::::deserialize(deserializer)?; + match opt { + None => Ok(None), + Some(s) if s.is_empty() => Ok(None), + Some(s) => Uuid::parse_str(&s) + .map(Some) + .map_err(serde::de::Error::custom), + } +} + #[derive(Default, Debug, Deserialize, Serialize)] pub struct RawEvent { #[serde( @@ -66,6 +80,7 @@ pub struct RawEvent { pub token: Option, #[serde(alias = "$distinct_id", skip_serializing_if = "Option::is_none")] pub distinct_id: Option, // posthog-js accepts arbitrary values as distinct_id + #[serde(default, deserialize_with = "empty_string_is_none")] pub uuid: Option, pub event: String, #[serde(default)] @@ -238,15 +253,30 @@ pub struct ProcessingContext { 
#[cfg(test)] mod tests { use crate::token::InvalidTokenReason; + use crate::v0_request::empty_string_is_none; use base64::Engine as _; use bytes::Bytes; use rand::distributions::Alphanumeric; use rand::Rng; + use serde::Deserialize; use serde_json::json; + use serde_json::Value; + use uuid::Uuid; use super::CaptureError; use super::RawRequest; + fn test_deserialize(json: Value) -> Result, serde_json::Error> { + #[derive(Deserialize)] + struct TestStruct { + #[serde(deserialize_with = "empty_string_is_none")] + uuid: Option, + } + + let result: TestStruct = serde_json::from_value(json)?; + Ok(result.uuid) + } + #[test] fn decode_uncompressed_raw_event() { let base64_payload = "ewogICAgImRpc3RpbmN0X2lkIjogIm15X2lkMSIsCiAgICAiZXZlbnQiOiAibXlfZXZlbnQxIiwKICAgICJwcm9wZXJ0aWVzIjogewogICAgICAgICIkZGV2aWNlX3R5cGUiOiAiRGVza3RvcCIKICAgIH0sCiAgICAiYXBpX2tleSI6ICJteV90b2tlbjEiCn0K"; @@ -431,4 +461,29 @@ mod tests { assert_extracted_token(r#"{"event":"e","$token":"single_token"}"#, "single_token"); assert_extracted_token(r#"{"event":"e","api_key":"single_token"}"#, "single_token"); } + + #[test] + fn test_empty_uuid_string_is_none() { + let json = serde_json::json!({"uuid": ""}); + let result = test_deserialize(json); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), None); + } + + #[test] + fn test_valid_uuid_is_some() { + let valid_uuid = "550e8400-e29b-41d4-a716-446655440000"; + let json = serde_json::json!({"uuid": valid_uuid}); + let result = test_deserialize(json); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), Some(Uuid::parse_str(valid_uuid).unwrap())); + } + + #[test] + fn test_invalid_uuid_is_error() { + let invalid_uuid = "not-a-uuid"; + let json = serde_json::json!({"uuid": invalid_uuid}); + let result = test_deserialize(json); + assert!(result.is_err()); + } } diff --git a/rust/capture/tests/common.rs b/rust/capture/tests/common.rs index 22d12d355a130..ee0b8659b42e5 100644 --- a/rust/capture/tests/common.rs +++ b/rust/capture/tests/common.rs @@ -25,7 +25,7 @@ use tokio::sync::Notify; use tokio::time::timeout; use tracing::{debug, warn}; -use capture::config::{Config, KafkaConfig}; +use capture::config::{CaptureMode, Config, KafkaConfig}; use capture::limiters::billing::QuotaResource; use capture::server::serve; @@ -41,6 +41,7 @@ pub static DEFAULT_CONFIG: Lazy = Lazy::new(|| Config { kafka_producer_linger_ms: 0, // Send messages as soon as possible kafka_producer_queue_mib: 10, kafka_message_timeout_ms: 10000, // 10s, ACKs can be slow on low volumes, should be tuned + kafka_producer_message_max_bytes: 1000000, // 1MB, rdkafka default kafka_compression_codec: "none".to_string(), kafka_hosts: "kafka:9092".to_string(), kafka_topic: "events_plugin_ingestion".to_string(), @@ -55,6 +56,7 @@ pub static DEFAULT_CONFIG: Lazy = Lazy::new(|| Config { otel_service_name: "capture-testing".to_string(), export_prometheus: false, redis_key_prefix: None, + capture_mode: CaptureMode::Events, }); static TRACING_INIT: Once = Once::new(); @@ -77,6 +79,12 @@ impl ServerHandle { config.kafka.kafka_historical_topic = historical.topic_name().to_string(); Self::for_config(config).await } + pub async fn for_recordings(main: &EphemeralTopic) -> Self { + let mut config = DEFAULT_CONFIG.clone(); + config.kafka.kafka_topic = main.topic_name().to_string(); + config.capture_mode = CaptureMode::Recordings; + Self::for_config(config).await + } pub async fn for_config(config: Config) -> Self { let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let addr = listener.local_addr().unwrap(); @@ -108,6 
+116,16 @@ impl ServerHandle { .await .expect("failed to send request") } + + pub async fn capture_recording>(&self, body: T) -> reqwest::Response { + let client = reqwest::Client::new(); + client + .post(format!("http://{:?}/s/", self.addr)) + .body(body) + .send() + .await + .expect("failed to send request") + } } impl Drop for ServerHandle { diff --git a/rust/capture/tests/django_compat.rs b/rust/capture/tests/django_compat.rs index abbc5356803b9..5c4bff8d27233 100644 --- a/rust/capture/tests/django_compat.rs +++ b/rust/capture/tests/django_compat.rs @@ -5,6 +5,7 @@ use axum_test_helper::TestClient; use base64::engine::general_purpose; use base64::Engine; use capture::api::{CaptureError, CaptureResponse, CaptureResponseCode, DataType, ProcessedEvent}; +use capture::config::CaptureMode; use capture::limiters::billing::BillingLimiter; use capture::redis::MockRedisClient; use capture::router::router; @@ -110,6 +111,7 @@ async fn it_matches_django_capture_behaviour() -> anyhow::Result<()> { redis, billing, false, + CaptureMode::Events, ); let client = TestClient::new(app); @@ -134,7 +136,8 @@ async fn it_matches_django_capture_behaviour() -> anyhow::Result<()> { ); assert_eq!( Some(CaptureResponse { - status: CaptureResponseCode::Ok + status: CaptureResponseCode::Ok, + quota_limited: None, }), res.json().await ); diff --git a/rust/capture/tests/recordings.rs b/rust/capture/tests/recordings.rs new file mode 100644 index 0000000000000..a54638497b987 --- /dev/null +++ b/rust/capture/tests/recordings.rs @@ -0,0 +1,95 @@ +use crate::common::*; +use anyhow::Result; +use assert_json_diff::assert_json_include; +use reqwest::StatusCode; +use serde_json::json; +mod common; + +#[tokio::test] +async fn it_captures_one_recording() -> Result<()> { + setup_tracing(); + let token = random_string("token", 16); + let distinct_id = random_string("id", 16); + let session_id = random_string("id", 16); + let window_id = random_string("id", 16); + + let main_topic = EphemeralTopic::new().await; + let server = ServerHandle::for_recordings(&main_topic).await; + + let event = json!({ + "token": token, + "event": "testing", + "distinct_id": distinct_id, + "$session_id": session_id, + "properties": { + "$session_id": session_id, + "$window_id": window_id, + "$snapshot_data": [], + } + }); + let res = server.capture_recording(event.to_string()).await; + assert_eq!(StatusCode::OK, res.status()); + + let event = main_topic.next_event()?; + assert_json_include!( + actual: event, + expected: json!({ + "token": token, + "distinct_id": distinct_id + }) + ); + + Ok(()) +} + +#[tokio::test] +async fn it_fails_no_session_id() -> Result<()> { + setup_tracing(); + let token = random_string("token", 16); + let distinct_id = random_string("id", 16); + let session_id = random_string("id", 16); + let window_id = random_string("id", 16); + + let main_topic = EphemeralTopic::new().await; + let server = ServerHandle::for_recordings(&main_topic).await; + + let event = json!({ + "token": token, + "event": "testing", + "distinct_id": distinct_id, + "$session_id": session_id, + "properties": { + "$window_id": window_id, + "$snapshot_data": [], + } + }); + let res = server.capture_recording(event.to_string()).await; + assert_eq!(StatusCode::BAD_REQUEST, res.status()); + Ok(()) +} + +#[tokio::test] +async fn it_rejects_bad_session_id() -> Result<()> { + setup_tracing(); + let token = random_string("token", 16); + let distinct_id = random_string("id", 16); + let window_id = random_string("id", 16); + + let main_topic = 
EphemeralTopic::new().await; + let server = ServerHandle::for_recordings(&main_topic).await; + + let event = json!({ + "token": token, + "event": "testing", + "distinct_id": distinct_id, + "$session_id": {"should_not_be": "an object"}, + "properties": { + "$session_id": {"should_not_be": "an object"}, + "$window_id": window_id, + "$snapshot_data": [], + } + }); + let res = server.capture_recording(event.to_string()).await; + assert_eq!(StatusCode::BAD_REQUEST, res.status()); + Ok(()) +} diff --git a/rust/common/dns/Cargo.toml b/rust/common/dns/Cargo.toml new file mode 100644 index 0000000000000..b67a2d04e63a5 --- /dev/null +++ b/rust/common/dns/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "common-dns" +version = "0.1.0" +edition = "2021" + +[lints] +workspace = true + +[dependencies] +futures = { workspace = true } +reqwest = { workspace = true } +tokio = { workspace = true } \ No newline at end of file diff --git a/rust/hook-worker/src/dns.rs b/rust/common/dns/src/lib.rs similarity index 98% rename from rust/hook-worker/src/dns.rs rename to rust/common/dns/src/lib.rs index 36fd7a005398e..1204743725750 100644 --- a/rust/hook-worker/src/dns.rs +++ b/rust/common/dns/src/lib.rs @@ -86,7 +86,7 @@ impl Resolve for PublicIPv4Resolver { #[cfg(test)] mod tests { - use crate::dns::{NoPublicIPv4Error, PublicIPv4Resolver}; + use crate::{NoPublicIPv4Error, PublicIPv4Resolver}; use reqwest::dns::{Name, Resolve}; use std::str::FromStr; diff --git a/rust/common/health/src/lib.rs b/rust/common/health/src/lib.rs index 5d42bafa8ff05..4a1370e550922 100644 --- a/rust/common/health/src/lib.rs +++ b/rust/common/health/src/lib.rs @@ -4,7 +4,7 @@ use std::sync::{Arc, RwLock}; use axum::http::StatusCode; use axum::response::{IntoResponse, Response}; -use time::Duration; +use std::time::Duration; use tokio::sync::mpsc; use tracing::{info, warn}; @@ -143,7 +143,16 @@ impl HealthRegistry { /// Registers a new component in the registry. The returned handle should be passed /// to the component, to allow it to frequently report its health status. 
- pub async fn register(&self, component: String, deadline: Duration) -> HealthHandle { + pub async fn register(&self, component: String, deadline: D) -> HealthHandle + where + // HACK: to let callers user time::Duration or std::time::Duration (and therefore chrono::Duration), + // since apparently we use all three + D: TryInto, + { + let Ok(deadline) = deadline.try_into() else { + // TODO - I should return an error here, but I don't want to refactor everything that uses this right now + panic!("invalid deadline") + }; let handle = HealthHandle { component, deadline, diff --git a/rust/common/metrics/Cargo.toml b/rust/common/metrics/Cargo.toml new file mode 100644 index 0000000000000..14ed059df0f19 --- /dev/null +++ b/rust/common/metrics/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "common-metrics" +version = "0.1.0" +edition = "2021" + +[lints] +workspace = true + +[dependencies] +axum = { workspace = true } +metrics-exporter-prometheus = { workspace = true } +tokio = { workspace = true } +metrics = { workspace = true } \ No newline at end of file diff --git a/rust/common/metrics/README.md b/rust/common/metrics/README.md new file mode 100644 index 0000000000000..4788321ecddfc --- /dev/null +++ b/rust/common/metrics/README.md @@ -0,0 +1 @@ +Ripped from rusty-hook, since it'll be used across more or less all cyclotron stuff, as well as rustyhook \ No newline at end of file diff --git a/rust/common/metrics/src/lib.rs b/rust/common/metrics/src/lib.rs new file mode 100644 index 0000000000000..0089736300ba4 --- /dev/null +++ b/rust/common/metrics/src/lib.rs @@ -0,0 +1,169 @@ +use std::time::{Instant, SystemTime}; + +use axum::{ + body::Body, extract::MatchedPath, http::Request, middleware::Next, response::IntoResponse, + routing::get, Router, +}; +use metrics_exporter_prometheus::{PrometheusBuilder, PrometheusHandle}; + +/// Bind a `TcpListener` on the provided bind address to serve a `Router` on it. +/// This function is intended to take a Router as returned by `setup_metrics_router`, potentially with more routes added by the caller. +pub async fn serve(router: Router, bind: &str) -> Result<(), std::io::Error> { + let listener = tokio::net::TcpListener::bind(bind).await?; + + axum::serve(listener, router).await?; + + Ok(()) +} + +/// Add the prometheus endpoint and middleware to a router, should be called last. +pub fn setup_metrics_routes(router: Router) -> Router { + let recorder_handle = setup_metrics_recorder(); + + router + .route( + "/metrics", + get(move || std::future::ready(recorder_handle.render())), + ) + .layer(axum::middleware::from_fn(track_metrics)) +} + +pub fn setup_metrics_recorder() -> PrometheusHandle { + const BUCKETS: &[f64] = &[ + 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0, 50.0, 100.0, 250.0, + ]; + + PrometheusBuilder::new() + .set_buckets(BUCKETS) + .unwrap() + .install_recorder() + .unwrap() +} + +/// Middleware to record some common HTTP metrics +/// Someday tower-http might provide a metrics middleware: https://github.com/tower-rs/tower-http/issues/57 +pub async fn track_metrics(req: Request, next: Next) -> impl IntoResponse { + let start = Instant::now(); + + let path = if let Some(matched_path) = req.extensions().get::() { + matched_path.as_str().to_owned() + } else { + req.uri().path().to_owned() + }; + + let method = req.method().clone(); + + // Run the rest of the request handling first, so we can measure it and get response + // codes. 
+ let response = next.run(req).await; + + let latency = start.elapsed().as_secs_f64(); + let status = response.status().as_u16().to_string(); + + let labels = [ + ("method", method.to_string()), + ("path", path), + ("status", status), + ]; + + metrics::counter!("http_requests_total", &labels).increment(1); + metrics::histogram!("http_requests_duration_seconds", &labels).record(latency); + + response +} + +/// Returns the number of seconds since the Unix epoch, to use in prom gauges. +/// Saturates to zero if the system time is set before epoch. +pub fn get_current_timestamp_seconds() -> f64 { + SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap_or_default() + .as_secs() as f64 +} + +// Shorthand for common metric types +pub fn inc(name: &'static str, labels: &[(String, String)], value: u64) { + metrics::counter!(name, labels).increment(value); +} + +pub fn gauge(name: &'static str, lables: &[(String, String)], value: f64) { + metrics::gauge!(name, lables).set(value); +} + +// A guard to record the time between creation and drop as a histogram entry +pub struct TimingGuard<'a> { + name: &'static str, + labels: TimingGuardLabels<'a>, + start: Instant, +} + +// Shorthand constructor for that guard +pub fn timing_guard<'a>(name: &'static str, labels: &'a [(String, String)]) -> TimingGuard<'a> { + TimingGuard { + name, + labels: TimingGuardLabels::new(labels), + start: Instant::now(), + } +} + +// Timing guards start out cheap to construct, but if you want to push extra +// labels onto them, they'll need to allocate. This enum tracks that state. +enum TimingGuardLabels<'a> { + None, + Borrowed(&'a [(String, String)]), + Owned(Vec<(String, String)>), +} + +impl<'a> TimingGuard<'a> { + // This consumes the guard, making "label this span and then immediately report the timing" + // a one-liner (simple don't re-bind the return value), but also it's a bit of a footgun. + pub fn label(mut self, key: &str, value: &str) -> Self { + self.labels.push_label(key, value); + self + } + + // This is meant to be used with the above to make what's happening more obvious. I don't know + // if it's good enough, but it's an improvement. 
+ pub fn fin(self) {} +} + +impl<'a> Drop for TimingGuard<'a> { + fn drop(&mut self) { + let labels = self.labels.as_slice(); + metrics::histogram!(self.name, labels).record(self.start.elapsed().as_millis() as f64); + } +} + +impl<'a> TimingGuardLabels<'a> { + fn new(labels: &'a [(String, String)]) -> Self { + if labels.is_empty() { + TimingGuardLabels::None + } else { + TimingGuardLabels::Borrowed(labels) + } + } + + fn as_slice(&self) -> &[(String, String)] { + match self { + TimingGuardLabels::None => &[], + TimingGuardLabels::Borrowed(labels) => labels, + TimingGuardLabels::Owned(labels) => labels, + } + } + + fn push_label(&mut self, key: &str, value: &str) { + match self { + TimingGuardLabels::None => { + *self = TimingGuardLabels::Owned(vec![(key.to_string(), value.to_string())]); + } + TimingGuardLabels::Borrowed(labels) => { + let mut existing = labels.to_vec(); + existing.push((key.to_string(), value.to_string())); + *self = TimingGuardLabels::Owned(existing); + } + TimingGuardLabels::Owned(labels) => { + labels.push((key.to_string(), value.to_string())); + } + }; + } +} diff --git a/rust/common/serve_metrics/Cargo.toml b/rust/common/serve_metrics/Cargo.toml new file mode 100644 index 0000000000000..05eb90c0bd29c --- /dev/null +++ b/rust/common/serve_metrics/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "serve-metrics" +version = "0.1.0" +edition = "2021" + +[lints] +workspace = true + +[dependencies] +axum = { workspace = true } +tokio = { workspace = true } +metrics-exporter-prometheus = { workspace = true } +metrics = { workspace = true } \ No newline at end of file diff --git a/rust/hook-common/src/metrics.rs b/rust/common/serve_metrics/src/lib.rs similarity index 100% rename from rust/hook-common/src/metrics.rs rename to rust/common/serve_metrics/src/lib.rs diff --git a/rust/cyclotron-core/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json b/rust/cyclotron-core/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json new file mode 100644 index 0000000000000..a2cb4e3a0a883 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET last_heartbeat = NOW() WHERE id = $1 AND lock_id = $2", + "describe": { + "columns": [], + "parameters": { + "Left": ["Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "075421be22b51c50eb74ac1156175c285bc510766c175b1b8c4e4002e04ff503" +} diff --git a/rust/cyclotron-core/.sqlx/query-16d533b5a15b0b9926a181f578b5b577efe424710b45f02e1ddeece8bca96f87.json b/rust/cyclotron-core/.sqlx/query-16d533b5a15b0b9926a181f578b5b577efe424710b45f02e1ddeece8bca96f87.json new file mode 100644 index 0000000000000..23b0665a2d357 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-16d533b5a15b0b9926a181f578b5b577efe424710b45f02e1ddeece8bca96f87.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET vm_state = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Bytea", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "16d533b5a15b0b9926a181f578b5b577efe424710b45f02e1ddeece8bca96f87" +} diff --git a/rust/cyclotron-core/.sqlx/query-213e9d70e145a01fb42d5c3a80f9126073113a4af03c4c9fd3a81004d898f883.json b/rust/cyclotron-core/.sqlx/query-213e9d70e145a01fb42d5c3a80f9126073113a4af03c4c9fd3a81004d898f883.json new file mode 100644 index 0000000000000..f9150cfcda3e1 --- /dev/null +++ 
b/rust/cyclotron-core/.sqlx/query-213e9d70e145a01fb42d5c3a80f9126073113a4af03c4c9fd3a81004d898f883.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT COUNT(*) FROM cyclotron_jobs WHERE state = 'available' AND scheduled <= NOW()", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [null] + }, + "hash": "213e9d70e145a01fb42d5c3a80f9126073113a4af03c4c9fd3a81004d898f883" +} diff --git a/rust/cyclotron-core/.sqlx/query-229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0.json b/rust/cyclotron-core/.sqlx/query-229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0.json new file mode 100644 index 0000000000000..ffda6f4b70b26 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0.json @@ -0,0 +1,123 @@ +{ + "db_name": "PostgreSQL", + "query": "\nWITH available AS (\n SELECT\n id,\n state\n FROM cyclotron_jobs\n WHERE\n state = 'available'::JobState\n AND queue_name = $1\n AND scheduled <= NOW()\n ORDER BY\n priority ASC,\n scheduled ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET\n state = 'running'::JobState,\n lock_id = $3,\n last_heartbeat = NOW(),\n last_transition = NOW(),\n transition_count = transition_count + 1\nFROM available\nWHERE\n cyclotron_jobs.id = available.id\nRETURNING\n cyclotron_jobs.id,\n team_id,\n available.state as \"state: JobState\",\n queue_name,\n priority,\n function_id,\n created,\n last_transition,\n scheduled,\n transition_count,\n NULL::bytea as vm_state,\n metadata,\n parameters,\n blob,\n lock_id,\n last_heartbeat,\n janitor_touch_count\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "state: JobState", + "type_info": { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": ["available", "completed", "failed", "running", "paused"] + } + } + } + }, + { + "ordinal": 3, + "name": "queue_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "priority", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "function_id", + "type_info": "Uuid" + }, + { + "ordinal": 6, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "last_transition", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "scheduled", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "transition_count", + "type_info": "Int2" + }, + { + "ordinal": 10, + "name": "vm_state", + "type_info": "Bytea" + }, + { + "ordinal": 11, + "name": "metadata", + "type_info": "Bytea" + }, + { + "ordinal": 12, + "name": "parameters", + "type_info": "Bytea" + }, + { + "ordinal": 13, + "name": "blob", + "type_info": "Bytea" + }, + { + "ordinal": 14, + "name": "lock_id", + "type_info": "Uuid" + }, + { + "ordinal": 15, + "name": "last_heartbeat", + "type_info": "Timestamptz" + }, + { + "ordinal": 16, + "name": "janitor_touch_count", + "type_info": "Int2" + } + ], + "parameters": { + "Left": ["Text", "Int8", "Uuid"] + }, + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + null, + true, + true, + true, + true, + true, + false + ] + }, + "hash": "229c28c25aec24180c29e6ed636c165376f43116b85921c62b36b1b8e85562b0" +} diff --git a/rust/cyclotron-core/.sqlx/query-2b62adf40f8dd5758690c763df30fffa01b679951ec786f8ee2410454b9a2de0.json 
b/rust/cyclotron-core/.sqlx/query-2b62adf40f8dd5758690c763df30fffa01b679951ec786f8ee2410454b9a2de0.json new file mode 100644 index 0000000000000..3c2761eccb0a5 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-2b62adf40f8dd5758690c763df30fffa01b679951ec786f8ee2410454b9a2de0.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET queue_name = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Text", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "2b62adf40f8dd5758690c763df30fffa01b679951ec786f8ee2410454b9a2de0" +} diff --git a/rust/cyclotron-core/.sqlx/query-2f6de0977357909dfd8d3d510c39a284f16421f77b77fe38e67143f28e270805.json b/rust/cyclotron-core/.sqlx/query-2f6de0977357909dfd8d3d510c39a284f16421f77b77fe38e67143f28e270805.json new file mode 100644 index 0000000000000..b0e1ef221041f --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-2f6de0977357909dfd8d3d510c39a284f16421f77b77fe38e67143f28e270805.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET priority = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Int2", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "2f6de0977357909dfd8d3d510c39a284f16421f77b77fe38e67143f28e270805" +} diff --git a/rust/cyclotron-core/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json b/rust/cyclotron-core/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json new file mode 100644 index 0000000000000..2ff58c66714a1 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "\nWITH stalled AS (\n SELECT id FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET state = 'available', lock_id = NULL, last_heartbeat = NULL, janitor_touch_count = janitor_touch_count + 1\nFROM stalled\nWHERE cyclotron_jobs.id = stalled.id\n ", + "describe": { + "columns": [], + "parameters": { + "Left": ["Timestamptz"] + }, + "nullable": [] + }, + "hash": "54d9afe6952f92b753fbce2c4e8554065b71152389f98d35532c6b332d5a4c9d" +} diff --git a/rust/cyclotron-core/.sqlx/query-58dfd4671ac3497614b184384ac7f8d490dda8b27a150454e413d02f89c92050.json b/rust/cyclotron-core/.sqlx/query-58dfd4671ac3497614b184384ac7f8d490dda8b27a150454e413d02f89c92050.json new file mode 100644 index 0000000000000..5a2231c7c2fdd --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-58dfd4671ac3497614b184384ac7f8d490dda8b27a150454e413d02f89c92050.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET blob = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Bytea", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "58dfd4671ac3497614b184384ac7f8d490dda8b27a150454e413d02f89c92050" +} diff --git a/rust/cyclotron-core/.sqlx/query-884da9767d2992c7b279b4f8df5129b8c4d7020b7cb1999702aee1bfb1087efb.json b/rust/cyclotron-core/.sqlx/query-884da9767d2992c7b279b4f8df5129b8c4d7020b7cb1999702aee1bfb1087efb.json new file mode 100644 index 0000000000000..b728d398568c5 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-884da9767d2992c7b279b4f8df5129b8c4d7020b7cb1999702aee1bfb1087efb.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET lock_id = NULL, last_heartbeat = NULL WHERE id = 
$1 AND lock_id = $2", + "describe": { + "columns": [], + "parameters": { + "Left": ["Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "884da9767d2992c7b279b4f8df5129b8c4d7020b7cb1999702aee1bfb1087efb" +} diff --git a/rust/cyclotron-core/.sqlx/query-8ab11a89bc4720985e130c58021f46045c332cc45e69b08708b289cc933b3b5c.json b/rust/cyclotron-core/.sqlx/query-8ab11a89bc4720985e130c58021f46045c332cc45e69b08708b289cc933b3b5c.json new file mode 100644 index 0000000000000..66ae665232405 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-8ab11a89bc4720985e130c58021f46045c332cc45e69b08708b289cc933b3b5c.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET metadata = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Bytea", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "8ab11a89bc4720985e130c58021f46045c332cc45e69b08708b289cc933b3b5c" +} diff --git a/rust/cyclotron-core/.sqlx/query-98da1f12285a97a47ce88535c82a8c4eb4cb910b0c5ddbc6bdbd9156af7b4e23.json b/rust/cyclotron-core/.sqlx/query-98da1f12285a97a47ce88535c82a8c4eb4cb910b0c5ddbc6bdbd9156af7b4e23.json new file mode 100644 index 0000000000000..59a56c441cb7c --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-98da1f12285a97a47ce88535c82a8c4eb4cb910b0c5ddbc6bdbd9156af7b4e23.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET scheduled = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Timestamptz", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "98da1f12285a97a47ce88535c82a8c4eb4cb910b0c5ddbc6bdbd9156af7b4e23" +} diff --git a/rust/cyclotron-core/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json b/rust/cyclotron-core/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json new file mode 100644 index 0000000000000..51fb1b018120b --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT vm_state FROM cyclotron_jobs WHERE id = $1 AND lock_id = $2", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "vm_state", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": ["Uuid", "Uuid"] + }, + "nullable": [true] + }, + "hash": "aa595eaf28c1f4b872c278be407b59cc00f3125413f4032ac3647a6b5ee1a632" +} diff --git a/rust/cyclotron-core/.sqlx/query-b160b785a0377b854341105e99e4ef7a5da523e168a5f9be47f6caaef09487d7.json b/rust/cyclotron-core/.sqlx/query-b160b785a0377b854341105e99e4ef7a5da523e168a5f9be47f6caaef09487d7.json new file mode 100644 index 0000000000000..4364f2fee8816 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-b160b785a0377b854341105e99e4ef7a5da523e168a5f9be47f6caaef09487d7.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs SET parameters = $1 WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": ["Bytea", "Uuid", "Uuid"] + }, + "nullable": [] + }, + "hash": "b160b785a0377b854341105e99e4ef7a5da523e168a5f9be47f6caaef09487d7" +} diff --git a/rust/cyclotron-core/.sqlx/query-b3239c1dde9a88769ec488299612f7a3ad2b2ee57d8d2353c858299d7b6ffe13.json b/rust/cyclotron-core/.sqlx/query-b3239c1dde9a88769ec488299612f7a3ad2b2ee57d8d2353c858299d7b6ffe13.json new file mode 100644 index 0000000000000..d2942f91b1930 --- /dev/null +++ 
b/rust/cyclotron-core/.sqlx/query-b3239c1dde9a88769ec488299612f7a3ad2b2ee57d8d2353c858299d7b6ffe13.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE cyclotron_jobs\n SET state = $1, last_transition = NOW(), transition_count = transition_count + 1\n WHERE id = $2 AND lock_id = $3", + "describe": { + "columns": [], + "parameters": { + "Left": [ + { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": ["available", "completed", "failed", "running", "paused"] + } + } + }, + "Uuid", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "b3239c1dde9a88769ec488299612f7a3ad2b2ee57d8d2353c858299d7b6ffe13" +} diff --git a/rust/cyclotron-core/.sqlx/query-ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068.json b/rust/cyclotron-core/.sqlx/query-ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068.json new file mode 100644 index 0000000000000..fe174820c3a07 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068.json @@ -0,0 +1,123 @@ +{ + "db_name": "PostgreSQL", + "query": "\nWITH available AS (\n SELECT\n id,\n state\n FROM cyclotron_jobs\n WHERE\n state = 'available'::JobState\n AND queue_name = $1\n AND scheduled <= NOW()\n ORDER BY\n priority ASC,\n scheduled ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n)\nUPDATE cyclotron_jobs\nSET\n state = 'running'::JobState,\n lock_id = $3,\n last_heartbeat = NOW(),\n last_transition = NOW(),\n transition_count = transition_count + 1\nFROM available\nWHERE\n cyclotron_jobs.id = available.id\nRETURNING\n cyclotron_jobs.id,\n team_id,\n available.state as \"state: JobState\",\n queue_name,\n priority,\n function_id,\n created,\n last_transition,\n scheduled,\n transition_count,\n vm_state,\n metadata,\n parameters,\n blob,\n lock_id,\n last_heartbeat,\n janitor_touch_count\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Uuid" + }, + { + "ordinal": 1, + "name": "team_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "state: JobState", + "type_info": { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": ["available", "completed", "failed", "running", "paused"] + } + } + } + }, + { + "ordinal": 3, + "name": "queue_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "priority", + "type_info": "Int2" + }, + { + "ordinal": 5, + "name": "function_id", + "type_info": "Uuid" + }, + { + "ordinal": 6, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "last_transition", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "scheduled", + "type_info": "Timestamptz" + }, + { + "ordinal": 9, + "name": "transition_count", + "type_info": "Int2" + }, + { + "ordinal": 10, + "name": "vm_state", + "type_info": "Bytea" + }, + { + "ordinal": 11, + "name": "metadata", + "type_info": "Bytea" + }, + { + "ordinal": 12, + "name": "parameters", + "type_info": "Bytea" + }, + { + "ordinal": 13, + "name": "blob", + "type_info": "Bytea" + }, + { + "ordinal": 14, + "name": "lock_id", + "type_info": "Uuid" + }, + { + "ordinal": 15, + "name": "last_heartbeat", + "type_info": "Timestamptz" + }, + { + "ordinal": 16, + "name": "janitor_touch_count", + "type_info": "Int2" + } + ], + "parameters": { + "Left": ["Text", "Int8", "Uuid"] + }, + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + false + ] + }, + "hash": "ce036f16a37a41b9dc5a164de0b52345454cd3323568c4bef5b8480380287068" +} 
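
The two large .sqlx entries above describe cyclotron's dequeue statement: a CTE selects up to $2 available jobs for one queue, ordered by priority and then scheduled time, locks them with FOR UPDATE SKIP LOCKED so concurrent workers never take the same row, and the outer UPDATE marks them running while stamping lock_id, last_heartbeat and last_transition. A sketch of how a worker might issue it directly with sqlx follows; the function is illustrative rather than cyclotron-core's public API, the SQL is taken from the file above but trimmed to return only the job id, and sqlx is assumed to be built with its uuid feature:

```rust
use sqlx::PgPool;
use uuid::Uuid;

// Illustrative only: cyclotron-core wraps this statement in its own manager types.
// $1 = queue name, $2 = batch size, $3 = this worker's lock id.
async fn dequeue_ids(pool: &PgPool, queue: &str, limit: i64) -> sqlx::Result<Vec<Uuid>> {
    let lock_id = Uuid::now_v7(); // the worker keeps this id to touch the rows it now owns
    let rows: Vec<(Uuid,)> = sqlx::query_as(
        r#"
WITH available AS (
    SELECT id, state FROM cyclotron_jobs
    WHERE state = 'available'::JobState AND queue_name = $1 AND scheduled <= NOW()
    ORDER BY priority ASC, scheduled ASC
    LIMIT $2
    FOR UPDATE SKIP LOCKED
)
UPDATE cyclotron_jobs
SET state = 'running'::JobState, lock_id = $3,
    last_heartbeat = NOW(), last_transition = NOW(),
    transition_count = transition_count + 1
FROM available
WHERE cyclotron_jobs.id = available.id
RETURNING cyclotron_jobs.id
"#,
    )
    .bind(queue)
    .bind(limit)
    .bind(lock_id)
    .fetch_all(pool)
    .await?;
    Ok(rows.into_iter().map(|(id,)| id).collect())
}
```

Every later mutation in these files (setting vm_state, parameters, blob, state transitions, heartbeats) filters on both id and lock_id, so a worker that loses its lock can no longer modify a job another worker has picked up.
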
diff --git a/rust/cyclotron-core/.sqlx/query-f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c.json b/rust/cyclotron-core/.sqlx/query-f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c.json new file mode 100644 index 0000000000000..6139be53026c1 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c.json @@ -0,0 +1,31 @@ +{ + "db_name": "PostgreSQL", + "query": "\nINSERT INTO cyclotron_jobs\n (\n id,\n team_id,\n function_id,\n created,\n lock_id,\n last_heartbeat,\n janitor_touch_count,\n transition_count,\n last_transition,\n queue_name,\n state,\n scheduled,\n priority,\n vm_state,\n metadata,\n parameters,\n blob\n )\nVALUES\n ($1, $2, $3, NOW(), NULL, NULL, 0, 0, NOW(), $4, $5, $6, $7, $8, $9, $10, $11)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Int4", + "Uuid", + "Text", + { + "Custom": { + "name": "jobstate", + "kind": { + "Enum": ["available", "completed", "failed", "running", "paused"] + } + } + }, + "Timestamptz", + "Int2", + "Bytea", + "Bytea", + "Bytea", + "Bytea" + ] + }, + "nullable": [] + }, + "hash": "f074766d1fc32df17f92667f412af30c682288988fc6f102e8a063be97c3e51c" +} diff --git a/rust/cyclotron-core/.sqlx/query-f4e808f58dd290c6e2b49b63e9e0eb022936ba318021512a0cc0c2e0766abe7c.json b/rust/cyclotron-core/.sqlx/query-f4e808f58dd290c6e2b49b63e9e0eb022936ba318021512a0cc0c2e0766abe7c.json new file mode 100644 index 0000000000000..a585e9f7e7d73 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-f4e808f58dd290c6e2b49b63e9e0eb022936ba318021512a0cc0c2e0766abe7c.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM cyclotron_jobs WHERE state = 'completed'", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "f4e808f58dd290c6e2b49b63e9e0eb022936ba318021512a0cc0c2e0766abe7c" +} diff --git a/rust/cyclotron-core/.sqlx/query-fdda5a80f5495f2d4b15ce1a0963f990986c8b8433f01e449fbd1eee70ce6aeb.json b/rust/cyclotron-core/.sqlx/query-fdda5a80f5495f2d4b15ce1a0963f990986c8b8433f01e449fbd1eee70ce6aeb.json new file mode 100644 index 0000000000000..09fc24b340d3f --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-fdda5a80f5495f2d4b15ce1a0963f990986c8b8433f01e449fbd1eee70ce6aeb.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "\nDELETE FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 AND janitor_touch_count >= $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": ["Timestamptz", "Int2"] + }, + "nullable": [] + }, + "hash": "fdda5a80f5495f2d4b15ce1a0963f990986c8b8433f01e449fbd1eee70ce6aeb" +} diff --git a/rust/cyclotron-core/.sqlx/query-ffb66bdedf6506f95b9293ef88b0c51e2f5fb7d3271e1287165d2a35b6aaa25e.json b/rust/cyclotron-core/.sqlx/query-ffb66bdedf6506f95b9293ef88b0c51e2f5fb7d3271e1287165d2a35b6aaa25e.json new file mode 100644 index 0000000000000..605d79d57c098 --- /dev/null +++ b/rust/cyclotron-core/.sqlx/query-ffb66bdedf6506f95b9293ef88b0c51e2f5fb7d3271e1287165d2a35b6aaa25e.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM cyclotron_jobs WHERE state = 'failed'", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "ffb66bdedf6506f95b9293ef88b0c51e2f5fb7d3271e1287165d2a35b6aaa25e" +} diff --git a/rust/cyclotron-core/Cargo.toml b/rust/cyclotron-core/Cargo.toml new file mode 100644 index 0000000000000..bfec9301eee72 --- /dev/null +++ b/rust/cyclotron-core/Cargo.toml @@ 
-0,0 +1,17 @@ +[package] +name = "cyclotron-core" +version = "0.1.0" +edition = "2021" + +[lints] +workspace = true + +[dependencies] +serde = { workspace = true } +sqlx = { workspace = true } +chrono = { workspace = true } +tokio = { workspace = true } +thiserror = { workspace = true } +uuid = { workspace = true } +rand = { workspace = true } +futures = { workspace = true } \ No newline at end of file diff --git a/rust/cyclotron-core/migrations/20240804122549_initial_job_queue_schema.sql b/rust/cyclotron-core/migrations/20240804122549_initial_job_queue_schema.sql new file mode 100644 index 0000000000000..a69c44ef90f77 --- /dev/null +++ b/rust/cyclotron-core/migrations/20240804122549_initial_job_queue_schema.sql @@ -0,0 +1,113 @@ +CREATE TYPE JobState AS ENUM( + 'available', + 'completed', + 'failed', + 'running', + 'paused' +); + + +--------------------------------------------------------------------- +-- Job table +--------------------------------------------------------------------- +-- When a job is dequeued, it is locked by generating a UUID and returning it to the dequeuing +-- worker. Any worker that can't provide the correct lock_id when updating will have their updates +-- rejected. The reason this is important is because if, e.g., a worker holds a job in a running +-- state without updating the heartbeat, the janitor will return the job to the queue eventually, +-- and if the worker /then/ tries to update the job after another worker has picked it up, that's a +-- race. We track transition count and times alongside lock_id's and heartbeats for reporting and +-- debugging purposes, and we track the number of times the janitor has touched a job to spot poison +-- pills. +CREATE TABLE IF NOT EXISTS cyclotron_jobs ( + --------------------------------------------------------------------- + -- Job metadata + --------------------------------------------------------------------- + id UUID PRIMARY KEY, + team_id INT NOT NULL, + function_id UUID, + created TIMESTAMPTZ NOT NULL, + --------------------------------------------------------------------- + -- Queue bookkeeping - invisible to the worker + --------------------------------------------------------------------- + lock_id UUID, + -- This is set when a job is in a running state, and is required to update the job. + last_heartbeat TIMESTAMPTZ, + -- This is updated by the worker to indicate that the job is making forward progress even + -- without transitions (and should not be reaped) + janitor_touch_count SMALLINT NOT NULL, + transition_count SMALLINT NOT NULL, + last_transition TIMESTAMPTZ NOT NULL, + --------------------------------------------------------------------- + -- Queue components - determines which workers will consume this job + --------------------------------------------------------------------- + queue_name TEXT NOT NULL, + --------------------------------------------------------------------- + -- Job availability and priority (can this job be dequeued, and in what order?) + --------------------------------------------------------------------- + state JobState NOT NULL, + scheduled TIMESTAMPTZ NOT NULL, + priority SMALLINT NOT NULL, + --------------------------------------------------------------------- + -- Job data + --------------------------------------------------------------------- + vm_state TEXT, + -- This is meant for workers "talking to themselves", e.g. 
tracking retries or something + metadata TEXT, + -- This is meant for "the next guy" - hog might fill it with a URL to fetch, for example + parameters TEXT +); + +-- For a given worker, the set of "available" jobs depends on state, queue_name, and scheduled (so +-- we can exclude sleeping jobs). This index is partial, because we don't care about other states +-- for the purpose of dequeuing +CREATE INDEX idx_cyclotron_jobs_dequeue ON cyclotron_jobs (queue_name, state, scheduled, priority) +WHERE + state = 'available'; + +-- We create simple indexes on team_id, function_id and queue_name to support fast joins to future +-- control tables +CREATE INDEX idx_queue_team_id ON cyclotron_jobs(team_id); + +CREATE INDEX idx_queue_function_id ON cyclotron_jobs(function_id); + +CREATE INDEX idx_queue_queue_name ON cyclotron_jobs(queue_name); + + +--------------------------------------------------------------------- +-- Control tables +--------------------------------------------------------------------- + + +-- The dead letter metadata table - when a job is DLQ'd, whovever does it leaves a note here for us. +CREATE TABLE IF NOT EXISTS cyclotron_dead_letter_metadata ( + job_id UUID PRIMARY KEY, + -- The queue the job was on before it was DLQ'd (important if e.g. we want to re-schedule it after fixing a bug) + original_queue_name TEXT NOT NULL, + -- This is the reason the job was DLQ'd. This should be for humans, but can include structured data if needed (keep in mind the original job will still exist) + reason TEXT NOT NULL, + -- This is the time the job was DLQ'd + dlq_time TIMESTAMPTZ NOT NULL +); + +-- These are just a starting point, supporting overriding the state for a given team, function or queue +-- For now these are entirely unused +CREATE TABLE IF NOT EXISTS cyclotron_team_control ( + team_id INT PRIMARY KEY, + state_override JobState, + -- If this is not null, it overrides the state of all jobs for this team (allowing for e.g. pausing or force failing all of a teams jobs) + state_override_expires TIMESTAMPTZ -- State override can be temporary or permanent +); + +CREATE TABLE IF NOT EXISTS cyclotron_function_control ( + function_id UUID PRIMARY KEY, + state_override JobState, + -- If this is not null, it overrides the state of all jobs for this function (allowing for e.g. pausing or force failing all of a functions jobs) + state_override_expires TIMESTAMPTZ -- State override can be temporary or permanent +); + +CREATE TABLE IF NOT EXISTS cyclotron_queue_control ( + queue_name TEXT PRIMARY KEY, + state_override JobState, + -- If this is not null, it overrides the state of all jobs for this queue (allowing for e.g. 
pausing or force failing all of a queues jobs) + state_override_expires TIMESTAMPTZ -- State override can be temporary or permanent +); \ No newline at end of file diff --git a/rust/cyclotron-core/migrations/20240823191751_bytes_over_text.sql b/rust/cyclotron-core/migrations/20240823191751_bytes_over_text.sql new file mode 100644 index 0000000000000..cbe476e3c30e7 --- /dev/null +++ b/rust/cyclotron-core/migrations/20240823191751_bytes_over_text.sql @@ -0,0 +1,5 @@ +ALTER TABLE cyclotron_jobs + ALTER COLUMN vm_state TYPE bytea USING vm_state::bytea, + ALTER COLUMN metadata TYPE bytea USING metadata::bytea, + ALTER COLUMN parameters TYPE bytea USING parameters::bytea, + ADD COLUMN blob bytea; diff --git a/rust/cyclotron-core/src/bin/create_test_data.rs b/rust/cyclotron-core/src/bin/create_test_data.rs new file mode 100644 index 0000000000000..2e194378dcd24 --- /dev/null +++ b/rust/cyclotron-core/src/bin/create_test_data.rs @@ -0,0 +1,53 @@ +use chrono::{Duration, Utc}; +use cyclotron_core::{JobInit, ManagerConfig, PoolConfig, QueueManager}; +use uuid::Uuid; + +// Just inserts jobs as fast as it can, choosing randomly between hog and fetch workers, and between different priorities. +// prints every 100 jobs inserted. +#[tokio::main] +async fn main() { + let pool_config = PoolConfig { + db_url: "postgresql://posthog:posthog@localhost:5432/cyclotron".to_string(), + max_connections: None, + min_connections: None, + acquire_timeout_seconds: None, + max_lifetime_seconds: None, + idle_timeout_seconds: None, + }; + + let manager_config = ManagerConfig { + shards: vec![pool_config.clone()], + shard_depth_limit: None, + shard_depth_check_interval_seconds: None, + }; + + let manager = QueueManager::new(manager_config).await.unwrap(); + + let now = Utc::now() - Duration::minutes(1); + let start = Utc::now(); + let mut count = 0; + loop { + let queue = if rand::random() { "fetch" } else { "hog" }; + + let priority = (rand::random::() % 3) as i16; + + let test_job = JobInit { + team_id: 1, + queue_name: queue.to_string(), + priority, + scheduled: now, + function_id: Some(Uuid::now_v7()), + vm_state: None, + parameters: None, + metadata: None, + blob: None, + }; + + manager.create_job(test_job).await.unwrap(); + + count += 1; + if count % 100 == 0 { + println!("Elapsed: {:?}, count: {}", Utc::now() - start, count); + } + } +} diff --git a/rust/cyclotron-core/src/bin/load_test.rs b/rust/cyclotron-core/src/bin/load_test.rs new file mode 100644 index 0000000000000..f000ab49c6e12 --- /dev/null +++ b/rust/cyclotron-core/src/bin/load_test.rs @@ -0,0 +1,163 @@ +use std::{ + sync::{atomic::AtomicUsize, Arc}, + time::Instant, +}; + +use chrono::{Duration, Utc}; +use cyclotron_core::{JobInit, JobState, ManagerConfig, PoolConfig, QueueManager, Worker}; +use futures::future::join_all; +use uuid::Uuid; + +// This spins up a manager and 2 workers, and tries to simulate semi-realistic load (on the DB - the workers do nothing except complete jobs) +// - The manager inserts jobs as fast as it can, choosing randomly between hog and fetch workers, and between different priorities. +// - The workers will process jobs as fast as they can, in batches of 1000. +// - The manager and both workers track how long each insert and dequeue takes, in ms/job. +// - The manager never inserts more than 10,000 more jobs than the workers have processed. 
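Aside: both test binaries (create_test_data and load_test, below) hard-code the local posthog/cyclotron connection string. A minimal sketch of building the same configs from an environment variable instead — DATABASE_URL is an assumption of this sketch, not something either binary actually reads:

use cyclotron_core::{ManagerConfig, PoolConfig};

// Sketch only: the same PoolConfig/ManagerConfig the bins construct, but with the
// DSN taken from the environment, falling back to the hard-coded local default.
fn config_from_env() -> ManagerConfig {
    let db_url = std::env::var("DATABASE_URL")
        .unwrap_or_else(|_| "postgresql://posthog:posthog@localhost:5432/cyclotron".to_string());
    let pool = PoolConfig {
        db_url,
        max_connections: None,
        min_connections: None,
        acquire_timeout_seconds: None,
        max_lifetime_seconds: None,
        idle_timeout_seconds: None,
    };
    ManagerConfig {
        shards: vec![pool],
        shard_depth_limit: None,
        shard_depth_check_interval_seconds: None,
    }
}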
+const INSERT_BATCH_SIZE: usize = 1000; + +struct SharedContext { + jobs_inserted: AtomicUsize, + jobs_dequeued: AtomicUsize, +} + +async fn producer_loop(manager: QueueManager, shared_context: Arc) { + let mut time_spent_inserting = Duration::zero(); + let now = Utc::now() - Duration::minutes(1); + loop { + let mut to_insert = Vec::with_capacity(1000); + for _ in 0..INSERT_BATCH_SIZE { + let queue = if rand::random() { "fetch" } else { "hog" }; + + let priority = (rand::random::() % 3) as i16; + + let test_job = JobInit { + team_id: 1, + queue_name: queue.to_string(), + priority, + scheduled: now, + function_id: Some(Uuid::now_v7()), + vm_state: None, + parameters: None, + blob: None, + metadata: None, + }; + + to_insert.push(test_job); + } + + let start = Instant::now(); + manager.bulk_create_jobs(to_insert).await; + let elapsed = start.elapsed(); + time_spent_inserting += Duration::from_std(elapsed).unwrap(); + + let inserted = shared_context + .jobs_inserted + .fetch_add(INSERT_BATCH_SIZE, std::sync::atomic::Ordering::Relaxed); + + println!("Inserted: {} in {}, ", inserted, time_spent_inserting); + let mut dequeued = shared_context + .jobs_dequeued + .load(std::sync::atomic::Ordering::Relaxed); + while inserted > dequeued + 10_000 { + println!( + "Waiting for workers to catch up, lagging by {}", + inserted - dequeued + ); + tokio::time::sleep(Duration::milliseconds(100).to_std().unwrap()).await; + dequeued = shared_context + .jobs_dequeued + .load(std::sync::atomic::Ordering::Relaxed); + } + } +} + +async fn worker_loop(worker: Worker, shared_context: Arc, queue: &str) { + let mut time_spent_dequeuing = Duration::zero(); + let start = Utc::now(); + loop { + let loop_start = Instant::now(); + let jobs = worker.dequeue_jobs(queue, 1000).await.unwrap(); + + if jobs.is_empty() { + println!( + "Worker {:?} outpacing inserts, got no jobs, sleeping!", + queue + ); + tokio::time::sleep(Duration::milliseconds(100).to_std().unwrap()).await; + continue; + } + + let mut futs = Vec::with_capacity(jobs.len()); + for job in &jobs { + worker.set_state(job.id, JobState::Completed).unwrap(); + futs.push(worker.flush_job(job.id)); + } + + for res in join_all(futs).await { + res.unwrap(); + } + + time_spent_dequeuing += Duration::from_std(loop_start.elapsed()).unwrap(); + + let dequeued = shared_context + .jobs_dequeued + .fetch_add(jobs.len(), std::sync::atomic::Ordering::Relaxed); + + // To account for the bunch we just handled + let dequeued = dequeued + jobs.len(); + + println!( + "Dequeued, processed and completed {} jobs in {} for {:?}. 
Total time running: {}", + dequeued, + time_spent_dequeuing, + queue, + Utc::now() - start + ); + + if jobs.len() < 1000 { + println!( + "Worker {:?} outpacing manager, only got {} jobs, sleeping!", + queue, + jobs.len() + ); + tokio::time::sleep(Duration::milliseconds(100).to_std().unwrap()).await; + } + } +} + +#[tokio::main] +async fn main() { + let pool_config = PoolConfig { + db_url: "postgresql://posthog:posthog@localhost:5432/cyclotron".to_string(), + max_connections: None, + min_connections: None, + acquire_timeout_seconds: None, + max_lifetime_seconds: None, + idle_timeout_seconds: None, + }; + + let manager_config = ManagerConfig { + shards: vec![pool_config.clone()], + shard_depth_limit: None, + shard_depth_check_interval_seconds: None, + }; + + let shared_context = Arc::new(SharedContext { + jobs_inserted: AtomicUsize::new(0), + jobs_dequeued: AtomicUsize::new(0), + }); + + let manager = QueueManager::new(manager_config).await.unwrap(); + let worker_1 = Worker::new(pool_config.clone()).await.unwrap(); + let worker_2 = Worker::new(pool_config.clone()).await.unwrap(); + + let producer = producer_loop(manager, shared_context.clone()); + let worker_1 = worker_loop(worker_1, shared_context.clone(), "fetch"); + let worker_2 = worker_loop(worker_2, shared_context.clone(), "hog"); + + let producer = tokio::spawn(producer); + let worker_1 = tokio::spawn(worker_1); + let worker_2 = tokio::spawn(worker_2); + + tokio::try_join!(producer, worker_1, worker_2).unwrap(); +} diff --git a/rust/cyclotron-core/src/config.rs b/rust/cyclotron-core/src/config.rs new file mode 100644 index 0000000000000..8304816671435 --- /dev/null +++ b/rust/cyclotron-core/src/config.rs @@ -0,0 +1,42 @@ +use std::time::Duration; + +use serde::{Deserialize, Serialize}; +use sqlx::{pool::PoolOptions, PgPool}; + +// A pool config object, designed to be passable across API boundaries +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct PoolConfig { + pub db_url: String, + pub max_connections: Option, // Default to 10 + pub min_connections: Option, // Default to 1 + pub acquire_timeout_seconds: Option, // Default to 30 + pub max_lifetime_seconds: Option, // Default to 300 + pub idle_timeout_seconds: Option, // Default to 60 +} + +impl PoolConfig { + pub async fn connect(&self) -> Result { + let builder = PoolOptions::new() + .max_connections(self.max_connections.unwrap_or(10)) + .min_connections(self.min_connections.unwrap_or(1)) + .max_lifetime(Duration::from_secs( + self.max_lifetime_seconds.unwrap_or(300), + )) + .idle_timeout(Duration::from_secs(self.idle_timeout_seconds.unwrap_or(60))) + .acquire_timeout(Duration::from_secs( + self.acquire_timeout_seconds.unwrap_or(30), + )); + + builder.connect(&self.db_url).await + } +} + +pub const DEFAULT_QUEUE_DEPTH_LIMIT: u64 = 10_000; +pub const DEFAULT_SHARD_HEALTH_CHECK_INTERVAL: u64 = 10; + +#[derive(Debug, Serialize, Deserialize)] +pub struct ManagerConfig { + pub shards: Vec, + pub shard_depth_limit: Option, // Defaults to 10_000 available jobs per shard + pub shard_depth_check_interval_seconds: Option, // Defaults to 10 seconds - checking shard capacity +} diff --git a/rust/cyclotron-core/src/error.rs b/rust/cyclotron-core/src/error.rs new file mode 100644 index 0000000000000..4e870e75a2d59 --- /dev/null +++ b/rust/cyclotron-core/src/error.rs @@ -0,0 +1,17 @@ +use uuid::Uuid; + +#[derive(Debug, thiserror::Error)] +pub enum QueueError { + #[error("sqlx error: {0}")] + SqlxError(#[from] sqlx::Error), + #[error("Unknown job id: {0}")] + UnknownJobId(Uuid), + 
#[error("Job {0} flushed without a new state, which would leave it in a running state forever (or until reaped)")] + FlushWithoutNextState(Uuid), + #[error("Invalid lock {0} used to update job {1}. This usually means a job has been reaped from under a worker - did you forget to set the heartbeat?")] + InvalidLock(Uuid, Uuid), + #[error("Shard over capacity {0} for this manager, insert aborted")] + ShardFull(u64), + #[error("Timed waiting for shard to have capacity")] + TimedOutWaitingForCapacity, +} diff --git a/rust/cyclotron-core/src/janitor.rs b/rust/cyclotron-core/src/janitor.rs new file mode 100644 index 0000000000000..8fd98307fba67 --- /dev/null +++ b/rust/cyclotron-core/src/janitor.rs @@ -0,0 +1,80 @@ +use crate::DEAD_LETTER_QUEUE; +use chrono::Duration; +use sqlx::PgPool; + +use crate::{ + ops::{ + janitor::{ + delete_completed_jobs, delete_failed_jobs, detect_poison_pills, reset_stalled_jobs, + }, + meta::{count_total_waiting_jobs, dead_letter, run_migrations}, + }, + PoolConfig, QueueError, +}; + +// Thin layer on top of the raw janitor operations - mostly just avoids users having to take a dep on sqlx +pub struct Janitor { + pub pool: PgPool, +} + +impl Janitor { + pub async fn new(config: PoolConfig) -> Result { + let pool = config.connect().await?; + Ok(Self { pool }) + } + + pub fn from_pool(pool: PgPool) -> Self { + Self { pool } + } + + pub async fn run_migrations(&self) { + run_migrations(&self.pool).await; + } + + pub async fn delete_completed_jobs(&self) -> Result { + delete_completed_jobs(&self.pool).await + } + + pub async fn delete_failed_jobs(&self) -> Result { + delete_failed_jobs(&self.pool).await + } + + pub async fn reset_stalled_jobs(&self, timeout: Duration) -> Result { + reset_stalled_jobs(&self.pool, timeout).await + } + + pub async fn delete_poison_pills( + &self, + timeout: Duration, + max_janitor_touched: i16, + ) -> Result { + let poison = detect_poison_pills(&self.pool, timeout, max_janitor_touched).await?; + + for job in &poison { + dead_letter( + &self.pool, + *job, + &format!("poison pill detected based on a timeout of {}", timeout), + ) + .await?; + } + + Ok(poison.len() as u64) + } + + pub async fn waiting_jobs(&self) -> Result { + count_total_waiting_jobs(&self.pool).await + } + + pub async fn count_dlq_depth(&self) -> Result { + let result = sqlx::query_scalar!( + "SELECT COUNT(*) FROM cyclotron_jobs WHERE queue_name = $1", + DEAD_LETTER_QUEUE + ) + .fetch_one(&self.pool) + .await + .map_err(QueueError::from)?; + + Ok(result.unwrap_or(0) as u64) + } +} diff --git a/rust/cyclotron-core/src/lib.rs b/rust/cyclotron-core/src/lib.rs new file mode 100644 index 0000000000000..e737f38360165 --- /dev/null +++ b/rust/cyclotron-core/src/lib.rs @@ -0,0 +1,47 @@ +mod ops; + +// We do this pattern (privately use a module, then re-export parts of it) so we can refactor/rename or generally futz around with the internals without breaking the public API + +// Types +mod types; +pub use types::BulkInsertResult; +pub use types::Bytes; +pub use types::Job; +pub use types::JobInit; +pub use types::JobState; +pub use types::JobUpdate; + +// Errors +mod error; +pub use error::QueueError; + +// Manager +mod manager; +pub use manager::QueueManager; + +// Worker +mod worker; +pub use worker::Worker; + +// Janitor +mod janitor; +pub use janitor::Janitor; + +// Config +mod config; +pub use config::ManagerConfig; +pub use config::PoolConfig; + +// The shard id is a fixed value that is set by the janitor when it starts up. +// Workers may use this value when reporting metrics. 
The `Worker` struct provides +// a method for fetching this value, that caches it appropriately such that it's safe +// to call frequently, while still being up-to-date (even though it should "never" change) +pub const SHARD_ID_KEY: &str = "shard_id"; + +// This isn't pub because, ideally, nothing using the core will ever need to know it. +const DEAD_LETTER_QUEUE: &str = "_cyclotron_dead_letter"; + +#[doc(hidden)] +pub mod test_support { + pub use crate::manager::Shard; +} diff --git a/rust/cyclotron-core/src/manager.rs b/rust/cyclotron-core/src/manager.rs new file mode 100644 index 0000000000000..6339c4e9cf4ed --- /dev/null +++ b/rust/cyclotron-core/src/manager.rs @@ -0,0 +1,225 @@ +use std::sync::atomic::AtomicUsize; + +use chrono::{DateTime, Duration, Utc}; +use sqlx::PgPool; +use tokio::sync::RwLock; + +use crate::{ + config::{DEFAULT_QUEUE_DEPTH_LIMIT, DEFAULT_SHARD_HEALTH_CHECK_INTERVAL}, + ops::{ + manager::{bulk_create_jobs, create_job}, + meta::count_total_waiting_jobs, + }, + BulkInsertResult, JobInit, ManagerConfig, QueueError, +}; + +pub struct Shard { + pub pool: PgPool, + pub last_healthy: RwLock>, + pub check_interval: Duration, + pub depth_limit: u64, +} + +pub struct QueueManager { + shards: RwLock>, + next_shard: AtomicUsize, +} + +impl QueueManager { + pub async fn new(config: ManagerConfig) -> Result { + let mut shards = vec![]; + let depth_limit = config + .shard_depth_limit + .unwrap_or(DEFAULT_QUEUE_DEPTH_LIMIT); + let check_interval = Duration::seconds( + config + .shard_depth_check_interval_seconds + .unwrap_or(DEFAULT_SHARD_HEALTH_CHECK_INTERVAL) as i64, + ); + for shard in config.shards { + let pool = shard.connect().await.unwrap(); + let shard = Shard::new(pool, depth_limit, check_interval); + shards.push(shard); + } + Ok(Self { + shards: RwLock::new(shards), + next_shard: AtomicUsize::new(0), + }) + } + + #[doc(hidden)] // Mostly for testing, but safe to expose + pub fn from_pool(pool: PgPool) -> Self { + Self { + shards: RwLock::new(vec![Shard::new( + pool, + DEFAULT_QUEUE_DEPTH_LIMIT, + Duration::seconds(DEFAULT_SHARD_HEALTH_CHECK_INTERVAL as i64), + )]), + next_shard: AtomicUsize::new(0), + } + } + + pub async fn create_job(&self, init: JobInit) -> Result<(), QueueError> { + // TODO - here is where a lot of shard health and failover logic will go, eventually. + let next = self + .next_shard + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); + let shards = self.shards.read().await; + let shard = &shards[next % shards.len()]; + shard.create_job(init).await + } + + pub async fn create_job_blocking( + &self, + init: JobInit, + timeout: Option, + ) -> Result<(), QueueError> { + let next = self + .next_shard + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); + let shards = self.shards.read().await; + let shard = &shards[next % shards.len()]; + shard.create_job_blocking(init, timeout).await + } + + pub async fn bulk_create_jobs(&self, inits: Vec) -> BulkInsertResult { + let shards = self.shards.read().await; + let chunk_size = inits.len() / shards.len(); + let mut result = BulkInsertResult::new(); + // TODO - at some point, we should dynamically re-acquire the lock each time, to allow + // for re-routing jobs away from a bad shard during a bulk insert, but right now, we + // don't even re-try inserts. Later work. 
+ for chunk in inits.chunks(chunk_size) { + let next_shard = self + .next_shard + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); + let shard = &shards[next_shard % shards.len()]; + let shard_result = shard.bulk_create_jobs(chunk).await; + if let Err(err) = shard_result { + result.add_failure(err, chunk.to_vec()); + } + } + + result + } + + pub async fn bulk_create_jobs_blocking( + &self, + inits: Vec, + timeout: Option, + ) -> BulkInsertResult { + let shards = self.shards.read().await; + let chunk_size = inits.len() / shards.len(); + let mut result = BulkInsertResult::new(); + for chunk in inits.chunks(chunk_size) { + let next_shard = self + .next_shard + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); + let shard = &shards[next_shard % shards.len()]; + // TODO - we sequentially try each shard, but we could try to parallelize this. + let shard_result = shard.bulk_create_jobs_blocking(chunk, timeout).await; + if let Err(err) = shard_result { + result.add_failure(err, chunk.to_vec()); + } + } + + result + } +} + +impl Shard { + pub fn new(pool: PgPool, depth_limit: u64, check_interval: Duration) -> Self { + Self { + pool, + last_healthy: RwLock::new(Utc::now() - check_interval), + check_interval, + depth_limit, + } + } + + // Inserts a job, failing if the shard is at capacity + pub async fn create_job(&self, init: JobInit) -> Result<(), QueueError> { + self.insert_guard().await?; + create_job(&self.pool, init).await + } + + // Inserts a vec of jobs, failing if the shard is at capacity. Note "capacity" here just + // means "it isn't totally full" - if there's "capacity" for 1 job, and this is a vec of + // 1000, we still insert all 1000. + pub async fn bulk_create_jobs(&self, inits: &[JobInit]) -> Result<(), QueueError> { + self.insert_guard().await?; + bulk_create_jobs(&self.pool, inits).await + } + + // Inserts a job, blocking until there's capacity (or until the timeout is reached) + pub async fn create_job_blocking( + &self, + init: JobInit, + timeout: Option, + ) -> Result<(), QueueError> { + let start = Utc::now(); + while self.is_full().await? { + tokio::time::sleep(Duration::milliseconds(100).to_std().unwrap()).await; + if let Some(timeout) = &timeout { + if Utc::now() - start > *timeout { + return Err(QueueError::TimedOutWaitingForCapacity); + } + } + } + + create_job(&self.pool, init).await + } + + // As above, with the same caveats about what "capacity" means + pub async fn bulk_create_jobs_blocking( + &self, + inits: &[JobInit], + timeout: Option, + ) -> Result<(), QueueError> { + let start = Utc::now(); + while self.is_full().await? { + tokio::time::sleep(Duration::milliseconds(100).to_std().unwrap()).await; + if let Some(timeout) = &timeout { + if Utc::now() - start > *timeout { + return Err(QueueError::TimedOutWaitingForCapacity); + } + } + } + + bulk_create_jobs(&self.pool, inits).await + } + + pub async fn insert_guard(&self) -> Result<(), QueueError> { + if self.is_full().await? { + return Err(QueueError::ShardFull(self.depth_limit)); + } + + Ok(()) + } + + pub async fn is_full(&self) -> Result { + let last_healthy = self.last_healthy.read().await; + // If we were healthy less than the check interval ago, assume we are still + if Utc::now() - *last_healthy < self.check_interval { + return Ok(false); + } + + // Grab a write lock. This constrains the number of concurrent capacity checks + // to 1, purposefully - if someone spawns a thousand tasks to blockingly create + // a job, we don't want all of them to be querying the available count at once. 
+ drop(last_healthy); + let mut last_healthy = self.last_healthy.write().await; + // TOCTOU - multiple tasks could be racing to re-do the check, and the firs time one + // succeeds all the rest should skip it. + if Utc::now() - *last_healthy < self.check_interval { + return Ok(false); + } + + let pending = count_total_waiting_jobs(&self.pool).await?; + let is_full = pending >= self.depth_limit; + if !is_full { + *last_healthy = Utc::now(); + } + Ok(is_full) + } +} diff --git a/rust/cyclotron-core/src/ops/janitor.rs b/rust/cyclotron-core/src/ops/janitor.rs new file mode 100644 index 0000000000000..16bdb9180f0f9 --- /dev/null +++ b/rust/cyclotron-core/src/ops/janitor.rs @@ -0,0 +1,83 @@ +use chrono::{Duration, Utc}; +use uuid::Uuid; + +use crate::error::QueueError; + +// As a general rule, janitor operations are not queue specific (as in, they don't account for the +// queue name). We can revisit this later, if we decide we need the ability to do janitor operations +// on a per-queue basis. +pub async fn delete_completed_jobs<'c, E>(executor: E) -> Result +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let result = sqlx::query!("DELETE FROM cyclotron_jobs WHERE state = 'completed'") + .execute(executor) + .await + .map_err(QueueError::from)?; + + Ok(result.rows_affected()) +} + +pub async fn delete_failed_jobs<'c, E>(executor: E) -> Result +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let result = sqlx::query!("DELETE FROM cyclotron_jobs WHERE state = 'failed'") + .execute(executor) + .await + .map_err(QueueError::from)?; + + Ok(result.rows_affected()) +} + +// Jobs are considered stalled if their lock is held and their last_heartbeat is older than `timeout`. +// +// TODO - this /could/ return the lock_id's held, which might help with debugging (if workers reported +// the lock_id's they dequeue'd), but lets not do that right now. +pub async fn reset_stalled_jobs<'c, E>(executor: E, timeout: Duration) -> Result +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let oldest_valid_heartbeat = Utc::now() - timeout; + let result = sqlx::query!(r#" +WITH stalled AS ( + SELECT id FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 FOR UPDATE SKIP LOCKED +) +UPDATE cyclotron_jobs +SET state = 'available', lock_id = NULL, last_heartbeat = NULL, janitor_touch_count = janitor_touch_count + 1 +FROM stalled +WHERE cyclotron_jobs.id = stalled.id + "#, + oldest_valid_heartbeat + ) + .execute(executor) + .await + .map_err(QueueError::from)?; + + Ok(result.rows_affected()) +} + +// Poison pills are stalled jobs that have been reset by the janitor more than `max_janitor_touched` times. +pub async fn detect_poison_pills<'c, E>( + executor: E, + timeout: Duration, + max_janitor_touched: i16, +) -> Result, QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let oldest_valid_heartbeat = Utc::now() - timeout; + // KLUDGE - the lock_id being set isn't checked here. A job in a running state without a lock id is violating an invariant, + // and would be useful to report. 
+ let result = sqlx::query_scalar!( + r#" +SELECT id FROM cyclotron_jobs WHERE state = 'running' AND COALESCE(last_heartbeat, $1) <= $1 AND janitor_touch_count >= $2 + "#, + oldest_valid_heartbeat, + max_janitor_touched + ).fetch_all(executor) + .await + .map_err(QueueError::from)?; + + Ok(result) +} diff --git a/rust/cyclotron-core/src/ops/manager.rs b/rust/cyclotron-core/src/ops/manager.rs new file mode 100644 index 0000000000000..b0a51403439b9 --- /dev/null +++ b/rust/cyclotron-core/src/ops/manager.rs @@ -0,0 +1,167 @@ +use chrono::{DateTime, Utc}; +use uuid::Uuid; + +use crate::{ + error::QueueError, + types::{JobInit, JobState}, +}; + +pub async fn create_job<'c, E>(executor: E, data: JobInit) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let id = Uuid::now_v7(); + sqlx::query!( + r#" +INSERT INTO cyclotron_jobs + ( + id, + team_id, + function_id, + created, + lock_id, + last_heartbeat, + janitor_touch_count, + transition_count, + last_transition, + queue_name, + state, + scheduled, + priority, + vm_state, + metadata, + parameters, + blob + ) +VALUES + ($1, $2, $3, NOW(), NULL, NULL, 0, 0, NOW(), $4, $5, $6, $7, $8, $9, $10, $11) + "#, + id, + data.team_id, + data.function_id, + data.queue_name, + JobState::Available as _, + data.scheduled, + data.priority, + data.vm_state, + data.metadata, + data.parameters, + data.blob + ) + .execute(executor) + .await?; + + Ok(()) +} + +pub async fn bulk_create_jobs<'c, E>(executor: E, jobs: &[JobInit]) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let now = Utc::now(); + // Flatten these jobs into a series of vecs of arguments PG can unnest + let mut ids = Vec::with_capacity(jobs.len()); + let mut team_ids = Vec::with_capacity(jobs.len()); + let mut function_ids = Vec::with_capacity(jobs.len()); + let mut created_at = Vec::with_capacity(jobs.len()); + let mut lock_ids = Vec::with_capacity(jobs.len()); + let mut last_heartbeats = Vec::with_capacity(jobs.len()); + let mut janitor_touch_counts = Vec::with_capacity(jobs.len()); + let mut transition_counts = Vec::with_capacity(jobs.len()); + let mut last_transitions = Vec::with_capacity(jobs.len()); + let mut queue_names = Vec::with_capacity(jobs.len()); + let mut states = Vec::with_capacity(jobs.len()); + let mut scheduleds = Vec::with_capacity(jobs.len()); + let mut priorities = Vec::with_capacity(jobs.len()); + let mut vm_states = Vec::with_capacity(jobs.len()); + let mut metadatas = Vec::with_capacity(jobs.len()); + let mut parameters = Vec::with_capacity(jobs.len()); + let mut blob = Vec::with_capacity(jobs.len()); + + for d in jobs { + ids.push(Uuid::now_v7()); + team_ids.push(d.team_id); + function_ids.push(d.function_id); + created_at.push(now); + lock_ids.push(None::); + last_heartbeats.push(None::>); + janitor_touch_counts.push(0); + transition_counts.push(0); + last_transitions.push(now); + queue_names.push(d.queue_name.clone()); + states.push(JobState::Available); + scheduleds.push(d.scheduled); + priorities.push(d.priority); + vm_states.push(d.vm_state.clone()); + metadatas.push(d.metadata.clone()); + parameters.push(d.parameters.clone()); + blob.push(d.blob.clone()); + } + + // Using the "unnest" function to turn an array of rows into a set of rows + sqlx::query( + r#" +INSERT INTO cyclotron_jobs + ( + id, + team_id, + function_id, + created, + lock_id, + last_heartbeat, + janitor_touch_count, + transition_count, + last_transition, + queue_name, + state, + scheduled, + priority, + vm_state, + metadata, 
+ parameters, + blob + ) +SELECT * +FROM UNNEST( + $1, + $2, + $3, + $4, + $5, + $6, + $7, + $8, + $9, + $10, + $11, + $12, + $13, + $14, + $15, + $16, + $17 + ) +"#, + ) + .bind(ids) + .bind(team_ids) + .bind(function_ids) + .bind(created_at) + .bind(lock_ids) + .bind(last_heartbeats) + .bind(janitor_touch_counts) + .bind(transition_counts) + .bind(last_transitions) + .bind(queue_names) + .bind(states) + .bind(scheduleds) + .bind(priorities) + .bind(vm_states) + .bind(metadatas) + .bind(parameters) + .bind(blob) + .execute(executor) + .await?; + + Ok(()) +} diff --git a/rust/cyclotron-core/src/ops/meta.rs b/rust/cyclotron-core/src/ops/meta.rs new file mode 100644 index 0000000000000..d48acd88bc188 --- /dev/null +++ b/rust/cyclotron-core/src/ops/meta.rs @@ -0,0 +1,78 @@ +use sqlx::{postgres::PgQueryResult, PgPool}; +use uuid::Uuid; + +use crate::{error::QueueError, DEAD_LETTER_QUEUE}; + +pub async fn count_total_waiting_jobs<'c, E>(executor: E) -> Result +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let res = sqlx::query!( + "SELECT COUNT(*) FROM cyclotron_jobs WHERE state = 'available' AND scheduled <= NOW()", + ) + .fetch_one(executor) + .await?; + + let res = res.count.unwrap_or(0); + Ok(res as u64) +} + +pub fn throw_if_no_rows(res: PgQueryResult, job: Uuid, lock: Uuid) -> Result<(), QueueError> { + if res.rows_affected() == 0 { + Err(QueueError::InvalidLock(lock, job)) + } else { + Ok(()) + } +} + +/// Run the latest cyclotron migrations. Panics if the migrations can't be run - failure to run migrations is purposefully fatal. +pub async fn run_migrations(pool: &PgPool) { + sqlx::migrate!("./migrations") + .run(pool) + .await + .expect("Failed to run migrations"); +} + +/// Move a job into the dead letter queue, also updating the metadata table. Note that this operation does not +/// require a lock on the job. This is because the janitor needs to DLQ jobs that are stalled. The worker wrapper +/// around this operation should check that the job is "known" (owned by it) before calling this function. +pub async fn dead_letter<'c, E>(executor: E, job: Uuid, reason: &str) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres> + Clone, +{ + // The first thing we do here is forcefully take the lock on this job, ensuring any subsequent worker + // operations will fail - we do this because the janitor can move jobs out from under workers. We mark + // the job as "running" and heartbeat so nothing else messes with it. + let lock = Uuid::now_v7(); + let original_queue_name = sqlx::query_scalar!( + "UPDATE cyclotron_jobs SET state = 'running', lock_id = $1, last_heartbeat=NOW() WHERE id = $2 returning queue_name", + lock, + job + ) + .fetch_optional(executor.clone()) + .await?; + + let Some(original_queue_name) = original_queue_name else { + return Err(QueueError::UnknownJobId(job)); + }; + + // Now we add an entry to the dead metadata queue + sqlx::query!( + "INSERT INTO cyclotron_dead_letter_metadata (job_id, original_queue_name, reason, dlq_time) VALUES ($1, $2, $3, NOW())", + job, + original_queue_name, + reason + ).execute(executor.clone()).await?; + + // And finally, we move the job to the dead letter queue. Jobs in the DLQ are "available", because if they ever + // get moved back to a queue, they should be re-run. 
+ sqlx::query!( + "UPDATE cyclotron_jobs SET state = 'available', lock_id = NULL, queue_name = $1 WHERE id = $2", + DEAD_LETTER_QUEUE, + job + ) + .execute(executor) + .await?; + + Ok(()) +} diff --git a/rust/cyclotron-core/src/ops/mod.rs b/rust/cyclotron-core/src/ops/mod.rs new file mode 100644 index 0000000000000..e0848468bbc89 --- /dev/null +++ b/rust/cyclotron-core/src/ops/mod.rs @@ -0,0 +1,4 @@ +pub mod janitor; +pub mod manager; +pub mod meta; +pub mod worker; diff --git a/rust/cyclotron-core/src/ops/worker.rs b/rust/cyclotron-core/src/ops/worker.rs new file mode 100644 index 0000000000000..c7b0f10c86530 --- /dev/null +++ b/rust/cyclotron-core/src/ops/worker.rs @@ -0,0 +1,422 @@ +use chrono::{DateTime, Utc}; +use sqlx::{postgres::PgArguments, query::Query}; +use uuid::Uuid; + +use crate::{ + error::QueueError, + types::{Bytes, Job, JobState, JobUpdate}, +}; + +use super::meta::throw_if_no_rows; + +// Dequeue the next job batch from the queue, skipping VM state since it can be large +pub async fn dequeue_jobs<'c, E>( + executor: E, + queue: &str, + max: usize, +) -> Result, QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + // Transient lock id. This could be a worker ID, or something, but for now it's totally random (per-batch) + let lock_id = Uuid::now_v7(); + Ok(sqlx::query_as!( + Job, + r#" +WITH available AS ( + SELECT + id, + state + FROM cyclotron_jobs + WHERE + state = 'available'::JobState + AND queue_name = $1 + AND scheduled <= NOW() + ORDER BY + priority ASC, + scheduled ASC + LIMIT $2 + FOR UPDATE SKIP LOCKED +) +UPDATE cyclotron_jobs +SET + state = 'running'::JobState, + lock_id = $3, + last_heartbeat = NOW(), + last_transition = NOW(), + transition_count = transition_count + 1 +FROM available +WHERE + cyclotron_jobs.id = available.id +RETURNING + cyclotron_jobs.id, + team_id, + available.state as "state: JobState", + queue_name, + priority, + function_id, + created, + last_transition, + scheduled, + transition_count, + NULL::bytea as vm_state, + metadata, + parameters, + blob, + lock_id, + last_heartbeat, + janitor_touch_count + "#, + queue, + max as i64, + lock_id + ) + .fetch_all(executor) + .await?) +} + +// Dequeue a batch of jobs, with their VM state. +pub async fn dequeue_with_vm_state<'c, E>( + executor: E, + queue: &str, + max: usize, +) -> Result, QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let lock_id = Uuid::now_v7(); + Ok(sqlx::query_as!( + Job, + r#" +WITH available AS ( + SELECT + id, + state + FROM cyclotron_jobs + WHERE + state = 'available'::JobState + AND queue_name = $1 + AND scheduled <= NOW() + ORDER BY + priority ASC, + scheduled ASC + LIMIT $2 + FOR UPDATE SKIP LOCKED +) +UPDATE cyclotron_jobs +SET + state = 'running'::JobState, + lock_id = $3, + last_heartbeat = NOW(), + last_transition = NOW(), + transition_count = transition_count + 1 +FROM available +WHERE + cyclotron_jobs.id = available.id +RETURNING + cyclotron_jobs.id, + team_id, + available.state as "state: JobState", + queue_name, + priority, + function_id, + created, + last_transition, + scheduled, + transition_count, + vm_state, + metadata, + parameters, + blob, + lock_id, + last_heartbeat, + janitor_touch_count + "#, + queue, + max as i64, + lock_id + ) + .fetch_all(executor) + .await?) 
+} + +pub async fn get_vm_state<'c, E>( + executor: E, + job_id: Uuid, + lock_id: Uuid, +) -> Result, QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + struct VMState { + vm_state: Option, + } + + let res = sqlx::query_as!( + VMState, + "SELECT vm_state FROM cyclotron_jobs WHERE id = $1 AND lock_id = $2", + job_id, + lock_id + ) + .fetch_one(executor) + .await?; + + Ok(res.vm_state) +} + +// TODO - this isn't the cheapest way to update a row in a table... we could probably do better by instead +// using a query builder, but that means no longer using query_as! and query! macros, unfortunately. +// If/when we start hitting perf issues, this is a good place to start. +// +// NOTE - this clears the lock_id when the job state is set to anything other than "running", since that indicates +// the worker is finished with the job. This means subsequent flushes with the same lock_id will fail. +pub async fn flush_job<'c, C>( + connection: &mut C, + job_id: Uuid, + updates: JobUpdate, +) -> Result<(), QueueError> +where + C: sqlx::Connection, +{ + let mut txn = connection.begin().await?; + + let job_returned = !matches!(updates.state, Some(JobState::Running)); + let lock_id = updates.lock_id; + + if let Some(state) = updates.state { + set_state(&mut *txn, job_id, lock_id, state).await?; + } + + if let Some(queue_name) = updates.queue_name { + set_queue(&mut *txn, job_id, &queue_name, lock_id).await?; + } + + if let Some(priority) = updates.priority { + set_priority(&mut *txn, job_id, lock_id, priority).await?; + } + + if let Some(scheduled) = updates.scheduled { + set_scheduled(&mut *txn, job_id, scheduled, lock_id).await?; + } + + if let Some(vm_state) = updates.vm_state { + set_vm_state(&mut *txn, job_id, vm_state, lock_id).await?; + } + + if let Some(metadata) = updates.metadata { + set_metadata(&mut *txn, job_id, metadata, lock_id).await?; + } + + if let Some(parameters) = updates.parameters { + set_parameters(&mut *txn, job_id, parameters, lock_id).await?; + } + + if let Some(blob) = updates.blob { + set_blob(&mut *txn, job_id, blob, lock_id).await?; + } + + // Calling flush indicates forward progress, so we should touch the heartbeat + set_heartbeat(&mut *txn, job_id, lock_id).await?; + + // We do this here, instead of in the set_state call, because otherwise the lock_id passed to other + // updates would be invalid + if job_returned { + let query = sqlx::query!( + "UPDATE cyclotron_jobs SET lock_id = NULL, last_heartbeat = NULL WHERE id = $1 AND lock_id = $2", + job_id, + lock_id + ); + assert_does_update(&mut *txn, job_id, lock_id, query).await?; + } + + txn.commit().await?; + + Ok(()) +} + +// ---------------------- +// Setters +// +// Most of the rest of these functions are designed to be used as part of larger transactions, e.g. +// "completing" a job means updating various rows and then marking it complete, and we can build that +// by composing a set of individual queries together using a transaction. 
+// +// ---------------------- + +// Update the state of a job, also tracking the transition count and last transition time +pub async fn set_state<'c, E>( + executor: E, + job_id: Uuid, + lock_id: Uuid, + state: JobState, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let q = sqlx::query!( + r#"UPDATE cyclotron_jobs + SET state = $1, last_transition = NOW(), transition_count = transition_count + 1 + WHERE id = $2 AND lock_id = $3"#, + state as _, + job_id, + lock_id + ); + + assert_does_update(executor, job_id, lock_id, q).await +} + +pub async fn set_queue<'c, E>( + executor: E, + job_id: Uuid, + queue: &str, + lock_id: Uuid, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let q = sqlx::query!( + "UPDATE cyclotron_jobs SET queue_name = $1 WHERE id = $2 AND lock_id = $3", + queue, + job_id, + lock_id + ); + assert_does_update(executor, job_id, lock_id, q).await +} + +pub async fn set_priority<'c, E>( + executor: E, + job_id: Uuid, + lock_id: Uuid, + priority: i16, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let q = sqlx::query!( + "UPDATE cyclotron_jobs SET priority = $1 WHERE id = $2 AND lock_id = $3", + priority, + job_id, + lock_id + ); + assert_does_update(executor, job_id, lock_id, q).await +} + +pub async fn set_scheduled<'c, E>( + executor: E, + job_id: Uuid, + scheduled: DateTime, + lock_id: Uuid, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let q = sqlx::query!( + "UPDATE cyclotron_jobs SET scheduled = $1 WHERE id = $2 AND lock_id = $3", + scheduled, + job_id, + lock_id + ); + assert_does_update(executor, job_id, lock_id, q).await +} + +pub async fn set_vm_state<'c, E>( + executor: E, + job_id: Uuid, + vm_state: Option, + lock_id: Uuid, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let q = sqlx::query!( + "UPDATE cyclotron_jobs SET vm_state = $1 WHERE id = $2 AND lock_id = $3", + vm_state, + job_id, + lock_id + ); + assert_does_update(executor, job_id, lock_id, q).await +} + +pub async fn set_metadata<'c, E>( + executor: E, + job_id: Uuid, + metadata: Option, + lock_id: Uuid, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let q = sqlx::query!( + "UPDATE cyclotron_jobs SET metadata = $1 WHERE id = $2 AND lock_id = $3", + metadata, + job_id, + lock_id + ); + assert_does_update(executor, job_id, lock_id, q).await +} + +pub async fn set_parameters<'c, E>( + executor: E, + job_id: Uuid, + parameters: Option, + lock_id: Uuid, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let q = sqlx::query!( + "UPDATE cyclotron_jobs SET parameters = $1 WHERE id = $2 AND lock_id = $3", + parameters, + job_id, + lock_id + ); + assert_does_update(executor, job_id, lock_id, q).await +} + +pub async fn set_blob<'c, E>( + executor: E, + job_id: Uuid, + blob: Option, + lock_id: Uuid, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let q = sqlx::query!( + "UPDATE cyclotron_jobs SET blob = $1 WHERE id = $2 AND lock_id = $3", + blob, + job_id, + lock_id + ); + assert_does_update(executor, job_id, lock_id, q).await +} + +pub async fn set_heartbeat<'c, E>( + executor: E, + job_id: Uuid, + lock_id: Uuid, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let q = sqlx::query!( + "UPDATE cyclotron_jobs SET 
last_heartbeat = NOW() WHERE id = $1 AND lock_id = $2", + job_id, + lock_id + ); + assert_does_update(executor, job_id, lock_id, q).await +} + +// Simple wrapper, that just executes a query and throws an error if no rows were affected +async fn assert_does_update<'c, E>( + executor: E, + job_id: Uuid, + lock_id: Uuid, + query: Query<'_, sqlx::Postgres, PgArguments>, +) -> Result<(), QueueError> +where + E: sqlx::Executor<'c, Database = sqlx::Postgres>, +{ + let res = query.execute(executor).await?; + throw_if_no_rows(res, job_id, lock_id) +} diff --git a/rust/cyclotron-core/src/types.rs b/rust/cyclotron-core/src/types.rs new file mode 100644 index 0000000000000..5adf86c6050b4 --- /dev/null +++ b/rust/cyclotron-core/src/types.rs @@ -0,0 +1,145 @@ +use std::str::FromStr; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::postgres::{PgHasArrayType, PgTypeInfo}; +use uuid::Uuid; + +use crate::QueueError; + +pub type Bytes = Vec; + +#[derive(Debug, Deserialize, Serialize, sqlx::Type)] +#[serde(rename_all = "lowercase")] +#[sqlx(type_name = "JobState", rename_all = "lowercase")] +pub enum JobState { + Available, + Running, + Completed, + Failed, + Paused, +} + +impl FromStr for JobState { + type Err = (); + + fn from_str(s: &str) -> Result { + match s { + "available" => Ok(JobState::Available), + "running" => Ok(JobState::Running), + "completed" => Ok(JobState::Completed), + "failed" => Ok(JobState::Failed), + _ => Err(()), + } + } +} + +impl PgHasArrayType for JobState { + fn array_type_info() -> sqlx::postgres::PgTypeInfo { + // Postgres default naming convention for array types is "_typename" + PgTypeInfo::with_name("_JobState") + } +} + +// The chunk of data needed to enqueue a job +#[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq)] +pub struct JobInit { + pub team_id: i32, + pub queue_name: String, + pub priority: i16, + pub scheduled: DateTime, + pub function_id: Option, + pub vm_state: Option, + pub parameters: Option, + pub blob: Option, + pub metadata: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct Job { + // Job metadata + pub id: Uuid, + pub team_id: i32, + pub function_id: Option, // Some jobs might not come from hog, and it doesn't /kill/ use to support that + pub created: DateTime, + + // Queue bookkeeping + // This will be set for any worker that ever has a job in the "running" state (so any worker that dequeues a job) + // but I don't want to do the work to encode that in the type system right now - later it should be + pub lock_id: Option, + pub last_heartbeat: Option>, + pub janitor_touch_count: i16, + pub transition_count: i16, + pub last_transition: DateTime, + + // Virtual queue components + pub queue_name: String, // We can have multiple "virtual queues" workers pull from + + // Job availability + pub state: JobState, + pub priority: i16, // For sorting "available" jobs. Lower is higher priority + pub scheduled: DateTime, + + // Job data + pub vm_state: Option, // The state of the VM this job is running on (if it exists) + pub metadata: Option, // Additional fields a worker can tack onto a job, for e.g. tracking some state across retries (or number of retries in general by a given class of worker) + pub parameters: Option, // The actual parameters of the job (function args for a hog function, http request for a fetch function) + pub blob: Option, // An additional, binary, parameter field (for things like fetch request body) +} + +// A struct representing a set of updates for a job. 
Outer none values mean "don't update this field", +// with nested none values meaning "set this field to null" for nullable fields +#[derive(Debug, Deserialize, Serialize)] +pub struct JobUpdate { + pub lock_id: Uuid, // The ID of the lock acquired when this worker dequeued the job, required for any update to be valid + pub state: Option, + pub queue_name: Option, + pub priority: Option, + pub scheduled: Option>, + pub vm_state: Option>, + pub metadata: Option>, + pub parameters: Option>, + pub blob: Option>, +} + +impl JobUpdate { + pub fn new(lock_id: Uuid) -> Self { + Self { + lock_id, + state: None, + queue_name: None, + priority: None, + scheduled: None, + vm_state: None, + metadata: None, + parameters: None, + blob: None, + } + } +} + +// Bulk inserts across multiple shards can partially succeed, so we need to track failures +// and hand back failed job inits to the caller. +pub struct BulkInsertResult { + pub failures: Vec<(QueueError, Vec)>, +} + +impl BulkInsertResult { + pub fn new() -> Self { + Self { failures: vec![] } + } + + pub fn add_failure(&mut self, err: QueueError, jobs: Vec) { + self.failures.push((err, jobs)); + } + + pub fn all_succeeded(&self) -> bool { + self.failures.is_empty() + } +} + +impl Default for BulkInsertResult { + fn default() -> Self { + Self::new() + } +} diff --git a/rust/cyclotron-core/src/worker.rs b/rust/cyclotron-core/src/worker.rs new file mode 100644 index 0000000000000..c1862eb9a7c7e --- /dev/null +++ b/rust/cyclotron-core/src/worker.rs @@ -0,0 +1,274 @@ +use std::collections::HashMap; + +use chrono::{DateTime, Utc}; +use sqlx::PgPool; +use std::sync::Mutex; +use uuid::Uuid; + +use crate::{ + ops::{ + meta::{dead_letter, run_migrations}, + worker::{dequeue_jobs, dequeue_with_vm_state, flush_job, get_vm_state, set_heartbeat}, + }, + types::Bytes, + Job, JobState, JobUpdate, PoolConfig, QueueError, +}; + +// The worker's interface to the underlying queue system - a worker can do everything except +// create jobs (because job creation has to be shard-aware). +// +// This interface looks stange, because a lot of things that would normally be done with lifetimes +// and references are done with uuid's instead (and we lose some nice raii stuff as a result), but +// the reason for this is that this is designed to be embedded in other runtimes, where handing out +// lifetime'd references or things with drop impls isn't really practical. This makes it a little +// awkward to use, but since it's meant to be the core of other abstractions, I think it's ok for +// now (client libraries should wrap this to provide better interfaces). +pub struct Worker { + pool: PgPool, + // All dequeued job IDs that haven't been flushed yet. The idea is this lets us + // manage, on the rust side of any API boundary, the "pending" update of any given + // job, such that a user can progressively build up a full update, and then flush it, + // rather than having to track the update state on their side and submit it all at once. + // This also lets us "hide" all the locking logic, which we're not totally settled on yet. + + // TRICKY - this is a sync mutex, because that simplifies using the manager in an FFI + // context (since most functions below can be sync). We have to be careful never to + // hold a lock across an await point, though. 
+ pending: Mutex>, +} + +impl Worker { + pub async fn new(config: PoolConfig) -> Result { + let pool = config.connect().await?; + Ok(Self { + pool, + pending: Default::default(), + }) + } + + pub fn from_pool(pool: PgPool) -> Self { + Self { + pool, + pending: Default::default(), + } + } + + /// Run the latest cyclotron migrations. Panics if the migrations can't be run - failure to run migrations is purposefully fatal. + pub async fn run_migrations(&self) { + run_migrations(&self.pool).await; + } + + /// Dequeues jobs from the queue, and returns them. Job sorting happens at the queue level, + /// workers can't provide any filtering or sorting criteria - queue managers decide which jobs are run, + /// workers just run them. + pub async fn dequeue_jobs(&self, queue: &str, limit: usize) -> Result, QueueError> { + let jobs = dequeue_jobs(&self.pool, queue, limit).await?; + + let mut pending = self.pending.lock().unwrap(); + for job in &jobs { + // We need to hang onto the locks for a job until we flush it, so we can send updates. + let update = JobUpdate::new( + job.lock_id + .expect("Yell at oliver that the dequeuing code is broken. He's very sorry that your process just panicked"), + ); + pending.insert(job.id, update); + } + + Ok(jobs) + } + + /// This is the same as dequeue_jobs, but it also returns the vm_state of the job + pub async fn dequeue_with_vm_state( + &self, + queue: &str, + limit: usize, + ) -> Result, QueueError> { + let jobs = dequeue_with_vm_state(&self.pool, queue, limit).await?; + + let mut pending = self.pending.lock().unwrap(); + for job in &jobs { + // We need to hang onto the locks for a job until we flush it, so we can send updates. + let update = JobUpdate::new( + job.lock_id + .expect("Yell at oliver that the dequeuing (with vm) code is broken. He's very sorry that your process just panicked"), + ); + pending.insert(job.id, update); + } + + Ok(jobs) + } + + /// Retrieve the VM state for a job, if, for example, you dequeued it and then realised you + /// need the VM state as well. + pub async fn get_vm_state(&self, job_id: Uuid) -> Result, QueueError> { + let lock_id = { + let pending = self.pending.lock().unwrap(); + pending + .get(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? + .lock_id + }; + + get_vm_state(&self.pool, job_id, lock_id).await + } + + /// NOTE - This function can only be called once, even though the underlying + /// basic operation can be performed as many times as the caller likes (so long as + /// the job state is never set to something other than running, as that clears the + /// job lock). We're more strict here (flushes can only happen once, you must + /// flush some non-running state) to try and enforce a good interaction + /// pattern with the queue. I might return to this and loosen this constraint in the + /// future, if there's a motivating case for needing to flush partial job updates. + pub async fn flush_job(&self, job_id: Uuid) -> Result<(), QueueError> { + // TODO - this drops the job from the known jobs before the flush succeeds, + // which means that if the flush fails, we'll lose the job and can never + // update it's state (leaving it to the reaper). This is a bug, but I'm not + // sure I want to make flushes retryable just yet, so I'm leaving it for now. 
+ // NIT: this wrapping is to ensure pending is dropped prior to the await + let update = { + let mut pending = self.pending.lock().unwrap(); + let update = pending + .remove(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))?; + // It's a programming error to flush a job without setting a new state + match update.state { + Some(JobState::Running) | None => { + // Keep track of any /other/ updates that might have been stored, even in this case, + // so a user can queue up the appropriate state transition and flush properly + pending.insert(job_id, update); + return Err(QueueError::FlushWithoutNextState(job_id)); + } + _ => update, + } + }; + let mut connection = self.pool.acquire().await?; + flush_job(connection.as_mut(), job_id, update).await + } + + /// Jobs are reaped after some seconds (the number is deployment specific, and may become + /// specific on job properties like queue name in the future, as we figure out what /kinds/ of + /// jobs are longer or shorter running). A job is considered "dead" if it's in a running state, + /// and it's last heartbeat was more than the reaping time ago. This, like flush, returns an + /// error if you try to set the heartbeat on a job whose lock you don't have (which can happen + /// if e.g. the job was reaped out from under you). + pub async fn heartbeat(&self, job_id: Uuid) -> Result<(), QueueError> { + let lock_id = { + let pending = self.pending.lock().unwrap(); + pending + .get(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? + .lock_id + }; + let mut connection = self.pool.acquire().await?; + set_heartbeat(connection.as_mut(), job_id, lock_id).await + } + + /// This is how you "return" a job to the queue, by setting the state to "available" + pub fn set_state(&self, job_id: Uuid, state: JobState) -> Result<(), QueueError> { + let mut pending = self.pending.lock().unwrap(); + pending + .get_mut(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? + .state = Some(state); + Ok(()) + } + + pub fn set_queue(&self, job_id: Uuid, queue: &str) -> Result<(), QueueError> { + let mut pending = self.pending.lock().unwrap(); + pending + .get_mut(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? + .queue_name = Some(queue.to_string()); + Ok(()) + } + + /// Jobs are dequeued lowest-priority-first, so this is how you change the "base" priority of a job + /// (control tables may apply further deltas if e.g. a given function is in a degraded state) + pub fn set_priority(&self, job_id: Uuid, priority: i16) -> Result<(), QueueError> { + let mut pending = self.pending.lock().unwrap(); + pending + .get_mut(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? + .priority = Some(priority); + Ok(()) + } + + /// This is how you do e.g. retries after some time, by setting the scheduled time + /// to some time in the future. Sleeping, retry backoff, scheduling - it's all the same operation, + /// this one. + pub fn set_scheduled_at( + &self, + job_id: Uuid, + scheduled: DateTime, + ) -> Result<(), QueueError> { + let mut pending = self.pending.lock().unwrap(); + pending + .get_mut(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? + .scheduled = Some(scheduled); + Ok(()) + } + + /// Passing None here will clear the vm_state + pub fn set_vm_state( + &self, + job_id: Uuid, + vm_state: Option, // This (and the following) are Options, because the user can null them (by calling with None) + ) -> Result<(), QueueError> { + let mut pending = self.pending.lock().unwrap(); + pending + .get_mut(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? 
+ .vm_state = Some(vm_state); + Ok(()) + } + + /// Passing None here will clear the metadata + pub fn set_metadata(&self, job_id: Uuid, metadata: Option) -> Result<(), QueueError> { + let mut pending = self.pending.lock().unwrap(); + pending + .get_mut(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? + .metadata = Some(metadata); + Ok(()) + } + + /// Passing None here will clear the parameters + pub fn set_parameters( + &self, + job_id: Uuid, + parameters: Option, + ) -> Result<(), QueueError> { + let mut pending = self.pending.lock().unwrap(); + pending + .get_mut(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? + .parameters = Some(parameters); + Ok(()) + } + + pub async fn dead_letter(&self, job_id: Uuid, reason: &str) -> Result<(), QueueError> { + // KLUDGE: Non-lexical lifetimes are good but they're just not perfect yet - + // changing this to not be a scope bump, and instead explicitly drop'ing the + // lock after the if check, makes the compiler think the lock is held across + // the await point. + { + let pending = self.pending.lock().unwrap(); + if !pending.contains_key(&job_id) { + return Err(QueueError::UnknownJobId(job_id)); + } + } + + dead_letter(&self.pool, job_id, reason).await + } + + /// Passing None here will clear the blob + pub fn set_blob(&self, job_id: Uuid, blob: Option) -> Result<(), QueueError> { + let mut pending = self.pending.lock().unwrap(); + pending + .get_mut(&job_id) + .ok_or(QueueError::UnknownJobId(job_id))? + .blob = Some(blob); + Ok(()) + } +} diff --git a/rust/cyclotron-core/tests/base_ops.rs b/rust/cyclotron-core/tests/base_ops.rs new file mode 100644 index 0000000000000..35c55c7037f44 --- /dev/null +++ b/rust/cyclotron-core/tests/base_ops.rs @@ -0,0 +1,256 @@ +use std::sync::Arc; + +use chrono::{Duration, Utc}; +use common::{assert_job_matches_init, create_new_job, dates_match}; +use cyclotron_core::{JobState, QueueManager, Worker}; +use sqlx::PgPool; +use uuid::Uuid; + +mod common; + +// I know this should be a bunch of tests, but for hacking together stuff right now, it'll do +#[sqlx::test(migrations = "./migrations")] +async fn test_queue(db: PgPool) { + let manager = QueueManager::from_pool(db.clone()); + let worker = Worker::from_pool(db); + + let job_1 = create_new_job(); + let mut job_2 = create_new_job(); + + job_2.priority = 2; // Lower priority jobs should be returned second + + let queue_name = job_1.queue_name.clone(); + + manager + .create_job(job_1.clone()) + .await + .expect("failed to create job"); + manager + .create_job(job_2.clone()) + .await + .expect("failed to create job"); + + let jobs = worker + .dequeue_jobs(&queue_name, 2) + .await + .expect("failed to dequeue job"); + + assert_eq!(jobs.len(), 2); + // This also assert that the ordering is correct in terms of priority + assert_job_matches_init(&jobs[0], &job_1); + assert_job_matches_init(&jobs[1], &job_2); + + // Now we can re-queue these jobs (imagine we had done work) + worker + .set_state(jobs[0].id, JobState::Available) + .expect("failed to set state"); + worker + .set_state(jobs[1].id, JobState::Available) + .expect("failed to set state"); + + // Flush the two jobs, having made no other changes, then assert we can re-dequeue them + worker + .flush_job(jobs[0].id) + .await + .expect("failed to flush job"); + worker + .flush_job(jobs[1].id) + .await + .expect("failed to flush job"); + + let jobs = worker + .dequeue_jobs(&queue_name, 2) + .await + .expect("failed to dequeue job"); + + assert_eq!(jobs.len(), 2); + assert_job_matches_init(&jobs[0], &job_1); + 
assert_job_matches_init(&jobs[1], &job_2); + + // Re-queue them again + worker + .set_state(jobs[0].id, JobState::Available) + .expect("failed to set state"); + worker + .set_state(jobs[1].id, JobState::Available) + .expect("failed to set state"); + + worker + .flush_job(jobs[0].id) + .await + .expect("failed to flush job"); + worker + .flush_job(jobs[1].id) + .await + .expect("failed to flush job"); + + // Spin up two tasks to race on dequeuing, and assert at most 2 jobs are dequeued + let worker = Arc::new(worker); + let moved = worker.clone(); + let queue_name_moved = queue_name.clone(); + let fut_1 = async move { + moved + .dequeue_jobs(&queue_name_moved, 2) + .await + .expect("failed to dequeue job") + }; + let moved = worker.clone(); + let queue_name_moved = queue_name.clone(); + let fut_2 = async move { + moved + .dequeue_jobs(&queue_name_moved, 2) + .await + .expect("failed to dequeue job") + }; + + let (jobs_1, jobs_2) = tokio::join!(fut_1, fut_2); + assert_eq!(jobs_1.len() + jobs_2.len(), 2); + + let jobs = jobs_1 + .into_iter() + .chain(jobs_2.into_iter()) + .collect::>(); + + // And now, any subsequent dequeues will return no jobs + let empty = worker + .dequeue_jobs(&queue_name, 2) + .await + .expect("failed to dequeue job"); + assert_eq!(empty.len(), 0); + + // If we try to flush a job without setting what it's next state will be (or if we set that next state to be "running"), + // we should get an error + worker + .flush_job(jobs[0].id) + .await + .expect_err("expected error due to no-next-state"); + + worker + .set_state(jobs[1].id, JobState::Running) + .expect("failed to set state"); + worker + .flush_job(jobs[1].id) + .await + .expect_err("expected error due to running state"); + + // But if we properly set the state to completed or failed, now we can flush + worker + .set_state(jobs[0].id, JobState::Completed) + .expect("failed to set state"); + worker + .set_state(jobs[1].id, JobState::Failed) + .expect("failed to set state"); + + worker + .flush_job(jobs[0].id) + .await + .expect("failed to flush job"); + worker + .flush_job(jobs[1].id) + .await + .expect("failed to flush job"); + + // And now, any subsequent dequeues will return no jobs (because these jobs are finished) + let empty = worker + .dequeue_jobs(&queue_name, 2) + .await + .expect("failed to dequeue job"); + assert_eq!(empty.len(), 0); + + // Now, lets check that we can set every variable on a job + + // Set up some initial values + let now = Utc::now(); + let mut job = create_new_job(); + job.queue_name = "test".to_string(); + job.priority = 0; + job.scheduled = now - Duration::minutes(2); + job.vm_state = None; + job.parameters = None; + job.metadata = None; + + // Queue the job + manager + .create_job(job.clone()) + .await + .expect("failed to create job"); + + // Then dequeue it + let job = worker + .dequeue_jobs("test", 1) + .await + .expect("failed to dequeue job") + .pop() + .expect("failed to dequeue job"); + + // Set everything we're able to set, including state to available, so we can dequeue it again + worker + .set_state(job.id, JobState::Available) + .expect("failed to set state"); + worker + .set_queue(job.id, "test_2") + .expect("failed to set queue"); + worker + .set_priority(job.id, 1) + .expect("failed to set priority"); + worker + .set_scheduled_at(job.id, now - Duration::minutes(10)) + .expect("failed to set scheduled_at"); + worker + .set_vm_state(job.id, Some("test".as_bytes().to_owned())) + .expect("failed to set vm_state"); + worker + .set_parameters(job.id, 
Some("test".as_bytes().to_owned())) + .expect("failed to set parameters"); + worker + .set_blob(job.id, Some("test".as_bytes().to_owned())) + .expect("failed to set blob"); + worker + .set_metadata(job.id, Some("test".as_bytes().to_owned())) + .expect("failed to set metadata"); + + // Flush the job + worker.flush_job(job.id).await.expect("failed to flush job"); + + // Then dequeue it again (this time being sure to grab the vm state too) + let job = worker + .dequeue_with_vm_state("test_2", 1) + .await + .expect("failed to dequeue job") + .pop() + .expect("failed to dequeue job"); + + // And every value should be the updated one + assert_eq!(job.queue_name, "test_2"); + assert_eq!(job.priority, 1); + assert!(dates_match(&job.scheduled, &(now - Duration::minutes(10))),); + assert_eq!(job.vm_state, Some("test".as_bytes().to_owned())); + assert_eq!(job.parameters, Some("test".as_bytes().to_owned())); + assert_eq!(job.metadata, Some("test".as_bytes().to_owned())); +} + +#[sqlx::test(migrations = "./migrations")] +pub async fn test_bulk_insert(db: PgPool) { + let worker = Worker::from_pool(db.clone()); + let manager = QueueManager::from_pool(db.clone()); + + let job_template = create_new_job(); + + let jobs = (0..1000) + .map(|_| { + let mut job = job_template.clone(); + job.function_id = Some(Uuid::now_v7()); + job + }) + .collect::>(); + + let result = manager.bulk_create_jobs(jobs).await; + assert!(result.all_succeeded()); + + let dequeue_jobs = worker + .dequeue_jobs(&job_template.queue_name, 1000) + .await + .expect("failed to dequeue job"); + + assert_eq!(dequeue_jobs.len(), 1000); +} diff --git a/rust/cyclotron-core/tests/common.rs b/rust/cyclotron-core/tests/common.rs new file mode 100644 index 0000000000000..16c4cc5d2eaef --- /dev/null +++ b/rust/cyclotron-core/tests/common.rs @@ -0,0 +1,42 @@ +use chrono::{DateTime, Duration, Utc}; +use cyclotron_core::{Job, JobInit}; +use uuid::Uuid; + +#[allow(dead_code)] +pub fn create_new_job() -> JobInit { + JobInit { + team_id: 1, + function_id: Some(Uuid::now_v7()), // Lets us uniquely identify jobs without having the Uuid + queue_name: "test".to_string(), + priority: 0, + scheduled: Utc::now() - Duration::minutes(1), + vm_state: None, + + parameters: None, + blob: None, + metadata: None, + } +} + +#[allow(dead_code)] +pub fn dates_match(left: &DateTime, right: &DateTime) -> bool { + // Roundtripping a datetime to PG can cause sub-ms differences, so we need to check within a margin of error + // Seeing errors like this in CI: + // assertion `left == right` failed + // left: 2024-08-08T20:41:55.964936Z + // right: 2024-08-08T20:41:55.964936997Z + let diff = *left - *right; + diff.abs() < Duration::milliseconds(1) +} + +#[allow(dead_code)] +pub fn assert_job_matches_init(job: &Job, init: &JobInit) { + assert_eq!(job.team_id, init.team_id); + assert_eq!(job.function_id, init.function_id); + assert_eq!(job.queue_name, init.queue_name); + assert_eq!(job.priority, init.priority); + assert!(dates_match(&job.scheduled, &init.scheduled)); + assert_eq!(job.vm_state, init.vm_state); + assert_eq!(job.parameters, init.parameters); + assert_eq!(job.metadata, init.metadata); +} diff --git a/rust/cyclotron-core/tests/shard.rs b/rust/cyclotron-core/tests/shard.rs new file mode 100644 index 0000000000000..8446a0c2e9f28 --- /dev/null +++ b/rust/cyclotron-core/tests/shard.rs @@ -0,0 +1,68 @@ +use chrono::{Duration, Utc}; +use common::create_new_job; +use cyclotron_core::test_support::Shard; +use sqlx::PgPool; +use tokio::sync::RwLock; + +mod common; + +pub fn 
get_shard(db: PgPool) -> Shard { + Shard { + pool: db, + last_healthy: RwLock::new(Utc::now()), + check_interval: Duration::milliseconds(0), // We always want to check the limit, for these tests + depth_limit: 10, + } +} + +#[sqlx::test(migrations = "./migrations")] +pub async fn test_shard_limiting(db: PgPool) { + let shard = get_shard(db.clone()); + + // We should be able to insert 10 jobs + for _ in 0..10 { + shard.create_job(create_new_job()).await.unwrap(); + } + + // And then we should fail on the 11th + let result = shard.create_job(create_new_job()).await; + assert!(result.is_err()); +} + +#[sqlx::test(migrations = "./migrations")] +pub async fn test_shard_blocking_insert_waits(db: PgPool) { + let shard = get_shard(db.clone()); + + // We should be able to insert 10 jobs + for _ in 0..10 { + shard.create_job(create_new_job()).await.unwrap(); + } + + let timeout = Some(Duration::milliseconds(50)); + + let start = Utc::now(); + // And then we should fail on the 11th + let result = shard.create_job_blocking(create_new_job(), timeout).await; + assert!(result.is_err()); + + // We should have waited at least 50ms + assert!(Utc::now() - start >= Duration::milliseconds(50)); +} + +#[sqlx::test(migrations = "./migrations")] +pub async fn test_shard_allows_bulk_inserts_beyond_capacity(db: PgPool) { + let shard = get_shard(db.clone()); + + // We should be able to insert 10 jobs + for _ in 0..9 { + shard.create_job(create_new_job()).await.unwrap(); + } + + // And then we should be able to bulk insert 1000 + let inits = (0..1000).map(|_| create_new_job()).collect::>(); + shard.bulk_create_jobs(&inits).await.unwrap(); + + // And the next insert should fail + let result = shard.create_job(create_new_job()).await; + assert!(result.is_err()); +} diff --git a/rust/cyclotron-fetch/Cargo.toml b/rust/cyclotron-fetch/Cargo.toml new file mode 100644 index 0000000000000..e9f8de05bcff0 --- /dev/null +++ b/rust/cyclotron-fetch/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "cyclotron-fetch" +version = "0.1.0" +edition = "2021" + +[lints] +workspace = true + +[dependencies] +tracing-subscriber = { workspace = true } +chrono = { workspace = true } +tokio = { workspace = true } +tracing = { workspace = true } +uuid = { workspace = true } +envconfig = { workspace = true } +axum = { workspace = true } +thiserror = { workspace = true } +cyclotron-core = { path = "../cyclotron-core" } +common-metrics = { path = "../common/metrics" } +common-dns = { path = "../common/dns" } +health = { path = "../common/health" } +reqwest = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +http = { workspace = true } +rand = { workspace = true } +futures = { workspace = true } + +[dev-dependencies] +sqlx = { workspace = true } +httpmock = { workspace = true } \ No newline at end of file diff --git a/rust/cyclotron-fetch/src/config.rs b/rust/cyclotron-fetch/src/config.rs new file mode 100644 index 0000000000000..a57cbafe5e287 --- /dev/null +++ b/rust/cyclotron-fetch/src/config.rs @@ -0,0 +1,117 @@ +use chrono::Duration; +use cyclotron_core::PoolConfig; +use envconfig::Envconfig; +use uuid::Uuid; + +#[derive(Envconfig)] +pub struct Config { + #[envconfig(from = "BIND_HOST", default = "::")] + pub host: String, + + #[envconfig(from = "BIND_PORT", default = "3304")] + pub port: u16, + + #[envconfig(default = "postgres://posthog:posthog@localhost:5432/cyclotron")] + pub database_url: String, + + #[envconfig(default = "10")] + pub pg_max_connections: u32, + + #[envconfig(default = "1")] + pub 
pg_min_connections: u32, + + #[envconfig(default = "30")] + pub pg_acquire_timeout_seconds: u64, + + #[envconfig(default = "300")] + pub pg_max_lifetime_seconds: u64, + + #[envconfig(default = "60")] + pub pg_idle_timeout_seconds: u64, + + #[envconfig(default = "false")] + pub allow_internal_ips: bool, + + #[envconfig(default = "default_worker_id")] + pub worker_id: String, + + #[envconfig(default = "default")] + pub shard_id: String, + + #[envconfig(default = "1")] + pub job_poll_interval_seconds: i64, + + #[envconfig(default = "1000")] + pub concurrent_requests_limit: u32, + + #[envconfig(default = "30")] + pub fetch_timeout_seconds: i64, + + #[envconfig(default = "10")] + pub max_retry_attempts: u32, + + #[envconfig(default = "fetch")] + pub queue_served: String, + + #[envconfig(default = "1000")] + pub batch_size: usize, + + #[envconfig(default = "1000000")] + pub max_response_bytes: usize, + + #[envconfig(default = "4000")] + pub retry_backoff_base_ms: i64, +} + +#[allow(dead_code)] +fn default_worker_id() -> String { + Uuid::now_v7().to_string() +} + +#[derive(Debug, Clone)] +pub struct AppConfig { + pub host: String, + pub port: u16, + pub worker_id: String, + pub shard_id: String, + pub job_poll_interval: Duration, // How long we wait to poll for new jobs, when we're at capacity or find no new jobs + pub concurrent_requests_limit: u32, + pub fetch_timeout: Duration, + pub max_retry_attempts: u32, + pub queue_served: String, + pub batch_size: usize, + pub max_response_bytes: usize, + pub retry_backoff_base: Duration, // Job retry backoff times are this * attempt count + pub allow_internal_ips: bool, +} + +impl Config { + pub fn to_components(self) -> (AppConfig, PoolConfig) { + let app_config = AppConfig { + host: self.host, + port: self.port, + worker_id: self.worker_id, + shard_id: self.shard_id, + job_poll_interval: Duration::seconds(self.job_poll_interval_seconds), + concurrent_requests_limit: self.concurrent_requests_limit, + fetch_timeout: Duration::seconds(self.fetch_timeout_seconds), + max_retry_attempts: self.max_retry_attempts, + queue_served: self.queue_served, + batch_size: self.batch_size, + max_response_bytes: self.max_response_bytes, + retry_backoff_base: Duration::milliseconds(self.retry_backoff_base_ms), + allow_internal_ips: self.allow_internal_ips, + }; + + let pool_config = PoolConfig { + db_url: self.database_url, + max_connections: Some(self.pg_max_connections), + min_connections: Some(self.pg_min_connections), + acquire_timeout_seconds: Some(self.pg_acquire_timeout_seconds), + max_lifetime_seconds: Some(self.pg_max_lifetime_seconds), + idle_timeout_seconds: Some(self.pg_idle_timeout_seconds), + }; + + (app_config, pool_config) + } +} diff --git a/rust/cyclotron-fetch/src/context.rs b/rust/cyclotron-fetch/src/context.rs new file mode 100644 index 0000000000000..f10f4149b1ada --- /dev/null +++ b/rust/cyclotron-fetch/src/context.rs @@ -0,0 +1,70 @@ +use std::sync::{Arc, RwLock}; + +use cyclotron_core::{PoolConfig, Worker, SHARD_ID_KEY}; +use health::HealthHandle; +use tokio::sync::Semaphore; + +use crate::{config::AppConfig, fetch::FetchError}; + +pub struct AppContext { + pub worker: Worker, + pub client: reqwest::Client, + pub concurrency_limit: Arc, + pub liveness: HealthHandle, + pub config: AppConfig, + pub metric_labels: RwLock>, +} + +impl AppContext { + pub async fn create( + config: AppConfig, + pool_config: PoolConfig, + liveness: HealthHandle, + ) -> Result { + let concurrency_limit = Arc::new(Semaphore::new(config.concurrent_requests_limit as usize)); 
+ + let resolver = Arc::new(common_dns::PublicIPv4Resolver {}); + + let mut client = reqwest::Client::builder().timeout(config.fetch_timeout.to_std().unwrap()); + + if !config.allow_internal_ips { + client = client.dns_resolver(resolver); + } + + let client = client.build(); + + let client = match client { + Ok(c) => c, + Err(e) => { + return Err(FetchError::StartupError(format!( + "Failed to create reqwest client: {}", + e + ))); + } + }; + + let worker = Worker::new(pool_config).await?; + + let labels = vec![ + (SHARD_ID_KEY.to_string(), config.shard_id.clone()), + ("worker_id".to_string(), config.worker_id.clone()), + ("queue_served".to_string(), config.queue_served.clone()), + ]; + + Ok(Self { + worker, + client, + concurrency_limit, + liveness, + config, + metric_labels: RwLock::new(labels), + }) + } + + // *Relatively* cheap, compared to the update above, but + // still, better to grab at the top of your fn and then + // reuse + pub fn metric_labels(&self) -> Vec<(String, String)> { + self.metric_labels.read().unwrap().clone() + } +} diff --git a/rust/cyclotron-fetch/src/fetch.rs b/rust/cyclotron-fetch/src/fetch.rs new file mode 100644 index 0000000000000..bc5082e53ebaa --- /dev/null +++ b/rust/cyclotron-fetch/src/fetch.rs @@ -0,0 +1,726 @@ +use std::{cmp::min, collections::HashMap, fmt::Display, sync::Arc}; + +use chrono::{DateTime, Duration, Utc}; +use cyclotron_core::{Bytes, Job, JobState, QueueError, Worker}; +use futures::StreamExt; +use http::StatusCode; +use reqwest::Response; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use tokio::sync::OwnedSemaphorePermit; +use tracing::{error, instrument, warn}; +use uuid::Uuid; + +use crate::{context::AppContext, metrics_constants::*}; + +// TODO - a lot of these should maybe be configurable +pub const DEFAULT_RETRIES: u32 = 3; +pub const DEFAULT_ON_FINISH: OnFinish = OnFinish::Return; +pub const HEARTBEAT_INTERVAL_MS: i64 = 5000; + +// Exclusively for errors in the worker - these will +// never be serialised into the job queue, and indicate +// bad worker health. As a general rule, if one of these +// is produced, we should let the worker fall over (as in, +// the outer worker loop should exit). +#[derive(Error, Debug)] +pub enum FetchError { + #[error("timeout fetching jobs")] + JobFetchTimeout, + #[error(transparent)] + QueueError(#[from] QueueError), + // TRICKY - in most cases, serde errors are a FetchError (something coming from the queue was + // invalid), but this is used in cases where /we/ fail to serialise something /to/ the queue + #[error(transparent)] + SerdeError(#[from] serde_json::Error), + // We failed doing some kind of setup, like creating a reqwest client + #[error("error during startup: {0}")] + StartupError(String), +} + +#[derive(Debug, Serialize, Deserialize, Clone, Copy)] +#[serde(rename_all = "UPPERCASE")] +pub enum HttpMethod { + Get, + Post, + Patch, + Put, + Delete, +} + +impl From<&HttpMethod> for http::Method { + fn from(method: &HttpMethod) -> Self { + match method { + HttpMethod::Get => http::Method::GET, + HttpMethod::Post => http::Method::POST, + HttpMethod::Patch => http::Method::PATCH, + HttpMethod::Put => http::Method::PUT, + HttpMethod::Delete => http::Method::DELETE, + } + } +} + +// What does someone need to give us to execute a fetch? 
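// For reference, a minimal parameters payload a caller might serialise into job.parameters
// (a sketch based on the struct below and the tests later in this diff; the URL and queue
// name are illustrative, and headers, max_tries and on_finish are all optional):
//
//     {"url": "https://example.com/hook", "method": "GET", "return_queue": "return"}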
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[serde(rename_all = "lowercase")]
+pub struct FetchParameters {
+    pub url: String,
+    pub method: HttpMethod,
+    pub return_queue: String,
+    pub headers: Option<HashMap<String, String>>,
+    pub max_tries: Option<u32>,      // Defaults to 3
+    pub on_finish: Option<OnFinish>, // Defaults to Return
+}
+
+// What should we do when we get a result, or run out of tries for a given job?
+// Return means re-queue to the return_worker, Complete means mark as Completed/Failed
+#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
+#[serde(rename_all = "lowercase")]
+pub enum OnFinish {
+    Return,
+    Complete,
+}
+
+// Internal bookkeeping for a fetch job
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[serde(rename_all = "lowercase")]
+pub struct FetchMetadata {
+    tries: u32,
+    // The history of failures seen with this job
+    trace: Vec<FetchFailure>,
+}
+
+// This is what we put in the parameters of the job queue for the next
+// worker to pick up
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[serde(tag = "status", rename_all = "lowercase")]
+pub enum FetchResult {
+    Success { response: FetchResponse },
+    Failure { trace: Vec<FetchFailure> }, // If we failed entirely to fetch the job, we return the trace for user debugging
+}
+
+impl FetchResult {
+    pub fn is_success(&self) -> bool {
+        matches!(self, FetchResult::Success { .. })
+    }
+
+    pub fn take_body(self) -> (Self, Option<Bytes>) {
+        match self {
+            FetchResult::Success { mut response } => {
+                let body = response.body.take();
+                (FetchResult::Success { response }, body)
+            }
+            FetchResult::Failure { mut trace } => {
+                let body = trace.last_mut().and_then(|f| f.body.take());
+                (FetchResult::Failure { trace }, body)
+            }
+        }
+    }
+}
+
+// We distinguish between a "fetch failure" and a "worker failure" -
+// worker failures are internal-only, and do not count against the
+// retries of a job (generally, on worker failure, the job is either
+// moved to the dead letter queue, or dropped and left to the janitor to
+// reset). Fetch failures are, after retries, returned to the queue, and
+// represent the result of the fetch operation.
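// For reference, the serialised FetchResult the return-queue consumer finds in job.parameters
// looks roughly like the following (a sketch; values are illustrative, some optional fields are
// omitted, and the body itself is moved into job.blob rather than being serialised here):
//
//     {"status": "success", "response": {"status": 200, "headers": {"content-type": "text/plain"}}}
//     {"status": "failure", "trace": [{"kind": "failurestatus", "message": "Received failure status: 500", "status": 500}]}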
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[serde(rename_all = "lowercase")]
+pub struct FetchFailure {
+    pub kind: FetchFailureKind,
+    pub message: String,
+    pub headers: Option<HashMap<String, String>>, // If we have headers, we include them in the failure
+    pub status: Option<u16>, // If we have a status, we include it in the failure
+    pub timestamp: DateTime<Utc>, // Useful for users to correlate logs when debugging
+    #[serde(skip)] // We serialise the body separately into blob
+    pub body: Option<Bytes>, // If we have a body, we include it in the final failure (but not the trace)
+}
+
+impl FetchFailure {
+    pub fn new(kind: FetchFailureKind, message: impl AsRef<str>) -> Self {
+        Self {
+            kind,
+            message: message.as_ref().to_string(),
+            timestamp: Utc::now(),
+            headers: None,
+            status: None,
+            body: None,
+        }
+    }
+
+    pub fn failure_status(status: StatusCode) -> Self {
+        Self {
+            kind: FetchFailureKind::FailureStatus,
+            message: format!("Received failure status: {}", status),
+            timestamp: Utc::now(),
+            headers: None,
+            status: Some(status.as_u16()),
+            body: None,
+        }
+    }
+
+    pub fn with_headers(self, headers: HashMap<String, String>) -> Self {
+        Self {
+            headers: Some(headers),
+            ..self
+        }
+    }
+
+    pub fn with_status(self, status: u16) -> Self {
+        Self {
+            status: Some(status),
+            ..self
+        }
+    }
+
+    pub fn with_body(self, body: Bytes) -> Self {
+        Self {
+            body: Some(body),
+            ..self
+        }
+    }
+}
+
+impl From<reqwest::Error> for FetchFailure {
+    fn from(e: reqwest::Error) -> Self {
+        let kind = if e.is_timeout() {
+            FetchFailureKind::Timeout
+        } else {
+            FetchFailureKind::RequestError
+        };
+        Self {
+            kind,
+            message: e.to_string(),
+            timestamp: Utc::now(),
+            headers: None,
+            status: None,
+            body: None,
+        }
+    }
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
+#[serde(rename_all = "lowercase")]
+pub enum FetchFailureKind {
+    Timeout,
+    TimeoutGettingBody,
+    MissingParameters,
+    InvalidParameters,
+    RequestError,
+    FailureStatus,
+    InvalidBody, // We force bodies to be a utf8 string, for the sake of callers. TODO - we should consider letting callers enforce a body schema
+    ResponseTooLarge,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[serde(rename_all = "lowercase")]
+pub struct FetchResponse {
+    pub status: u16,
+    pub headers: HashMap<String, String>,
+    #[serde(skip)] // We serialise the body separately into blob
+    pub body: Option<Bytes>, // This is only an option to let us `take` it, to avoid body copies on serialisation
+}
+
+#[instrument(skip_all)]
+pub async fn tick(context: Arc<AppContext>) -> Result<usize, FetchError> {
+    let labels = Arc::new(context.metric_labels());
+
+    common_metrics::gauge(
+        WORKER_SAT,
+        &labels,
+        context.concurrency_limit.available_permits() as f64,
+    );
+
+    let max_jobs = min(
+        context.concurrency_limit.available_permits(),
+        context.config.batch_size,
+    );
+
+    let jobs = {
+        let _time = common_metrics::timing_guard(DEQUEUE_TIME, &labels);
+        context
+            .worker
+            .dequeue_jobs(&context.config.queue_served, max_jobs)
+            .await?
+    };
+
+    let num_jobs = jobs.len();
+
+    common_metrics::inc(WORKER_DEQUEUED, &labels, num_jobs as u64);
+
+    let _time = common_metrics::timing_guard(SPAWN_TIME, &labels);
+    for job in jobs {
+        let context = context.clone();
+        // We grab job permits individually, so that as soon as a job is finished, the
+        // permit to run another job is immediately available. This call should
+        // never block, since we only ever dequeue as many jobs as we have permits
+        // available.
+ let permit = context + .concurrency_limit + .clone() + .acquire_owned() + .await + .unwrap(); + let labels = labels.clone(); + tokio::spawn(async move { + // TODO - since worker errors are never an indication of a fetch failure, + // only of some internal worker issue, we should report unhealthy or fall + // over or something here. + if let Err(e) = run_job(context.clone(), job, permit).await { + error!("Error running job: {:?}", e); + common_metrics::inc(FETCH_JOB_ERRORS, &labels, 1) + } else { + common_metrics::inc(FETCH_JOBS_COMPLETED, &labels, 1); + } + }); + } + + Ok(num_jobs) +} + +impl From<&Job> for FetchMetadata { + fn from(job: &Job) -> Self { + let Some(m) = &job.metadata else { + return FetchMetadata { + tries: 0, + trace: vec![], + }; + }; + + let Ok(m) = serde_json::from_slice(m) else { + return FetchMetadata { + tries: 0, + trace: vec![], + }; + }; + + m + } +} + +impl TryFrom<&Job> for FetchParameters { + type Error = FetchFailure; + + fn try_from(job: &Job) -> Result { + let params = job.parameters.as_ref().ok_or(FetchFailure::new( + FetchFailureKind::MissingParameters, + "Missing parameters", + ))?; + + let Ok(p) = serde_json::from_slice(params) else { + return Err(FetchFailure::new( + FetchFailureKind::InvalidParameters, + "Invalid parameters", + )); + }; + + Ok(p) + } +} + +#[instrument(skip_all)] +pub async fn run_job( + context: Arc, + job: Job, + _permit: OwnedSemaphorePermit, +) -> Result<(), FetchError> { + let labels = context.metric_labels(); + let job_total = common_metrics::timing_guard(JOB_TOTAL_TIME, &labels); + + let metadata = FetchMetadata::from(&job); + let params = match FetchParameters::try_from(&job) { + Ok(p) => p, + Err(_) => { + // Failure to parse parameters is a programming error in whatever is handing us jobs, and we + // should dead letter the job and then return. + common_metrics::inc(FETCH_DEAD_LETTER, &labels, 1); + let res = context + .worker + .dead_letter(job.id, "Could not parse job parameters") + .await; + job_total + .label(OUTCOME_LABEL, "bad_parameters_dead_letter") + .fin(); + return Ok(res?); + } + }; + + let method = (¶ms.method).into(); + + let url: reqwest::Url = match (params.url).parse() { + Ok(u) => u, + Err(e) => { + warn!("Failed to parse URL: {}", e); + + let failure = FetchFailure::new( + FetchFailureKind::InvalidParameters, + format!("Invalid url: {} - {}", ¶ms.url, e), + ); + + // We can skip retries here - this failure will happen every time + let res = quick_fail_job( + &context.worker, + job, + params.return_queue, + params.on_finish.unwrap_or(DEFAULT_ON_FINISH), + failure, + ) + .await; + + job_total.label(OUTCOME_LABEL, "url_parse_failed").fin(); + return res; + } + }; + + let headers = match (¶ms.headers.unwrap_or_default()).try_into() { + Ok(h) => h, + Err(e) => { + warn!("Failed to parse headers: {}", e); + let failure = FetchFailure::new( + FetchFailureKind::InvalidParameters, + format!("Invalid headers: {}", e), + ); + + let res = quick_fail_job( + &context.worker, + job, + params.return_queue, + params.on_finish.unwrap_or(DEFAULT_ON_FINISH), + failure, + ) + .await; + + job_total + .label(OUTCOME_LABEL, "headers_parse_failure") + .fin(); + return res; + } + }; + + let body = reqwest::Body::from(job.blob.unwrap_or_default()); + + let mut send_fut = context + .client + .request(method, url) + .headers(headers) + .body(body) + .send(); + + let request_time = common_metrics::timing_guard(JOB_INITIAL_REQUEST_TIME, &labels); + let res = loop { + tokio::select! 
{ + res = &mut send_fut => { + break res + } + _ = tokio::time::sleep(Duration::milliseconds(HEARTBEAT_INTERVAL_MS).to_std().unwrap()) => { + context.worker.heartbeat(job.id).await?; + } + } + }; + + let res = match res { + Ok(r) => r, + Err(e) => { + // Record the request time before any queue operations + request_time.label(OUTCOME_LABEL, "request_error").fin(); + // For the counter, we push a response status of "error" + let mut labels = labels.clone(); + labels.push(( + RESPONSE_STATUS_LABEL.to_string(), + "request_error".to_string(), + )); + common_metrics::inc(RESPONSE_RECEIVED, &labels, 1); + let res = handle_fetch_failure( + &context, + job.id, + job.priority, + &metadata, + params.max_tries.unwrap_or(DEFAULT_RETRIES), + params.return_queue, + params.on_finish.unwrap_or(DEFAULT_ON_FINISH), + e, + ) + .await; + job_total.label(OUTCOME_LABEL, "request_error").fin(); + return res; + } + }; + // Grab the response metadata, since getting the body moves it + let status = res.status(); + let headers: HashMap = res + .headers() + .iter() + .map(|(k, v)| { + ( + k.as_str().to_string(), + v.to_str().unwrap_or_default().to_string(), + ) + }) + .collect(); + + request_time.label(OUTCOME_LABEL, &status.to_string()).fin(); + // Label the job with the request status, re-binding to avoid dropping the guard + let job_total = job_total.label(RESPONSE_STATUS_LABEL, &status.to_string()); + + let mut labels = labels.clone(); // We can't move out of labels because it's borrowed by the timing guards + labels.push((RESPONSE_STATUS_LABEL.to_string(), status.to_string())); + let labels = labels; + + common_metrics::inc(RESPONSE_RECEIVED, &labels, 1); + + let body_time = common_metrics::timing_guard(BODY_FETCH_TIME, &labels); + // We pre-emptively get the response body, because we incldued it in the failure trace, even if we got a failure status + let body = first_n_bytes_of_response( + &context.worker, + job.id, + res, + context.config.max_response_bytes, + ) + .await?; + + let body = match body { + Ok(b) => b.into_bytes(), + Err(e) => { + body_time.label(OUTCOME_LABEL, "body_fetch_error").fin(); + common_metrics::inc(BODY_FETCH_FAILED, &labels, 1); + // Tag the status and headers onto the failure + let e = e.with_status(status.as_u16()).with_headers(headers); + let res = handle_fetch_failure( + &context, + job.id, + job.priority, + &metadata, + params.max_tries.unwrap_or(DEFAULT_RETRIES), + params.return_queue, + params.on_finish.unwrap_or(DEFAULT_ON_FINISH), + e, + ) + .await; + job_total.label(OUTCOME_LABEL, "body_fetch_error").fin(); + return res; + } + }; + body_time.label(OUTCOME_LABEL, "success").fin(); + common_metrics::inc(BODY_FETCH_SUCCEEDED, &labels, 1); + + // TODO - we should handle "retryable" and "permanent" failures differently, mostly + // to be polite - retrying a permanent failure isn't a correctness problem, but it's + // rude (and inefficient) + if !status.is_success() { + let failure = FetchFailure::failure_status(status) + .with_headers(headers) + .with_body(body); + let res = handle_fetch_failure( + &context, + job.id, + job.priority, + &metadata, + params.max_tries.unwrap_or(DEFAULT_RETRIES), + params.return_queue, + params.on_finish.unwrap_or(DEFAULT_ON_FINISH), + failure, + ) + .await; + job_total.label(OUTCOME_LABEL, "failure_status").fin(); + return res; + } + + let result = FetchResult::Success { + response: FetchResponse { + status: status.as_u16(), + headers, + body: Some(body), + }, + }; + + let res = complete_job( + &context.worker, + job.id, + params.return_queue, + 
params.on_finish.unwrap_or(DEFAULT_ON_FINISH), + result, + ) + .await; + job_total.label(OUTCOME_LABEL, "success").fin(); + res +} + +// This immediately returns a job to the return_queue, with a single failure. It's used in cases like, e.g, +// parsing errors, where we know the job will never succeed. +pub async fn quick_fail_job( + worker: &Worker, + job: Job, + return_queue: String, + on_finish: OnFinish, + failure: FetchFailure, +) -> Result<(), FetchError> { + let result = FetchResult::Failure { + trace: vec![failure], + }; + complete_job(worker, job.id, return_queue, on_finish, result).await +} + +// Checks if the retry limit has been reached, and does one of: +// - Schedule the job for retry, doing metadata bookkeeping +// - Complete the job, with the failure trace +#[allow(clippy::too_many_arguments)] +pub async fn handle_fetch_failure( + context: &AppContext, + job_id: Uuid, + old_priority: i16, + metadata: &FetchMetadata, + max_tries: u32, + return_queue: String, + on_finish: OnFinish, + failure: F, +) -> Result<(), FetchError> +where + F: Into, +{ + let failure: FetchFailure = failure.into(); + let mut metadata = metadata.clone(); + metadata.tries += 1; + metadata.trace.push(failure); + + // TODO - right now we treat all failures as retryable, but we should probably be more aggressive in + // culling retries for permanent failures (this is less of a correctness issue and more of an efficiency/ + // politeness one). We might also want to make backoff configurable. + if metadata.tries < min(max_tries, context.config.max_retry_attempts) { + let next_available = + Utc::now() + (context.config.retry_backoff_base * (metadata.tries as i32)); + // We back off for at most an hour (since callers can configure max retries to be very high) + let next_available = min(next_available, Utc::now() + Duration::hours(1)); + // Add some seconds of jitter + let next_available = + next_available + Duration::seconds((rand::random::() % 30) as i64); + + // Set us up for a retry - update metadata, reschedule + context + .worker + .set_metadata(job_id, Some(serde_json::to_vec(&metadata)?))?; + context.worker.set_state(job_id, JobState::Available)?; + context.worker.set_scheduled_at(job_id, next_available)?; + + // We downgrade the priority of jobs that fail, so first attempts at jobs get better QoS + context.worker.set_priority(job_id, old_priority + 1)?; + + context.worker.flush_job(job_id).await?; + } else { + // Complete the job, with a Failed result + let result: FetchResult = FetchResult::Failure { + trace: metadata.trace, + }; + complete_job(&context.worker, job_id, return_queue, on_finish, result).await?; + } + + Ok(()) +} + +// Complete the job with some result. 
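// In terms of what the consumer on return_queue ends up seeing (a summary of the function below):
// job.parameters holds the serialised FetchResult minus its body, job.blob holds the body,
// metadata is cleared, priority is reset to 0, and the state is left Available for
// OnFinish::Return, or Completed/Failed for OnFinish::Complete.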
+pub async fn complete_job(
+    worker: &Worker,
+    job_id: Uuid,
+    return_queue: String,
+    on_finish: OnFinish,
+    result: FetchResult,
+) -> Result<(), FetchError> {
+    worker.set_state(job_id, JobState::Available)?;
+    worker.set_queue(job_id, &return_queue)?;
+    let (result, body) = result.take_body();
+
+    let is_success = result.is_success();
+
+    let result = do_or_dead_letter(worker, job_id, || serde_json::to_vec(&result)).await??;
+
+    match (on_finish, is_success) {
+        (OnFinish::Complete, true) => {
+            worker.set_state(job_id, JobState::Completed)?;
+        }
+        (OnFinish::Complete, false) => {
+            worker.set_state(job_id, JobState::Failed)?;
+        }
+        (OnFinish::Return, _) => {
+            // If we're returning the job, we don't care whether it succeeded or not, the caller wants it back
+            worker.set_state(job_id, JobState::Available)?;
+        }
+    }
+
+    worker.set_priority(job_id, 0)?; // Reset job priority on completion
+    worker.set_parameters(job_id, Some(result))?;
+    worker.set_blob(job_id, body)?;
+    worker.set_metadata(job_id, None)?; // We're finished with the job, so clear our internal state
+    worker.flush_job(job_id).await?;
+
+    Ok(())
+}
+
+// Pulls the body, while maintaining the job heartbeat.
+pub async fn first_n_bytes_of_response(
+    worker: &Worker,
+    job_id: Uuid,
+    response: Response,
+    n: usize,
+) -> Result<Result<String, FetchFailure>, FetchError> {
+    let mut body = response.bytes_stream();
+    // We deserialize into a vec, and then parse to a string
+    let mut buffer = Vec::with_capacity(n / 4); // Assume most request responses will be significantly smaller than the max
+
+    worker.heartbeat(job_id).await?;
+
+    loop {
+        tokio::select! {
+            chunk = body.next() => {
+                let chunk = match chunk {
+                    Some(Ok(c)) => c,
+                    Some(Err(e)) => return Ok(Err(FetchFailure::from(e))),
+                    None => break,
+                };
+
+                buffer.extend_from_slice(&chunk);
+
+                if buffer.len() >= n {
+                    return Ok(Err(
+                        FetchFailure::new(FetchFailureKind::ResponseTooLarge, "Response too large")
+                    ));
+                };
+            }
+            _ = tokio::time::sleep(Duration::milliseconds(HEARTBEAT_INTERVAL_MS).to_std().unwrap()) => {}
+        }
+        // Heartbeat every time we get a new body chunk, or every HEARTBEAT_INTERVAL_MS
+        worker.heartbeat(job_id).await?;
+    }
+
+    // TODO - we can handle binary data here, but for now we force response bodies to be utf8 string
+    let body = match String::from_utf8(buffer) {
+        Ok(s) => s,
+        Err(e) => {
+            let buffer = e.into_bytes();
+            return Ok(Err(FetchFailure::new(
+                FetchFailureKind::InvalidBody,
+                "Body could not be parsed as utf8",
+            )
+            .with_body(buffer)));
+        }
+    };
+
+    Ok(Ok(body))
+}
+
+pub async fn do_or_dead_letter<T, E>(
+    worker: &Worker,
+    job_id: Uuid,
+    f: impl FnOnce() -> Result<T, E>,
+) -> Result<Result<T, E>, FetchError>
+where
+    E: Display,
+{
+    let res = f();
+    match &res {
+        Ok(_) => {}
+        Err(e) => {
+            let reason = e.to_string();
+            worker.dead_letter(job_id, &reason).await?;
+        }
+    }
+    Ok(res)
+}
diff --git a/rust/cyclotron-fetch/src/lib.rs b/rust/cyclotron-fetch/src/lib.rs
new file mode 100644
index 0000000000000..b4e1a73a5004c
--- /dev/null
+++ b/rust/cyclotron-fetch/src/lib.rs
@@ -0,0 +1,4 @@
+pub mod config;
+pub mod context;
+pub mod fetch;
+pub mod metrics_constants;
diff --git a/rust/cyclotron-fetch/src/main.rs b/rust/cyclotron-fetch/src/main.rs
new file mode 100644
index 0000000000000..c0c02c6f5404b
--- /dev/null
+++ b/rust/cyclotron-fetch/src/main.rs
@@ -0,0 +1,100 @@
+use axum::{extract::State, routing::get, Router};
+use common_metrics::setup_metrics_routes;
+use cyclotron_fetch::{
+    config::Config,
+    context::AppContext,
+    fetch::{tick, FetchError},
+};
+use envconfig::Envconfig;
+use health::HealthRegistry;
+use std::{future::ready, sync::Arc};
+use tracing::{error, info};
+
+async fn listen(app: Router, bind: String) -> Result<(), std::io::Error> {
+    let listener = tokio::net::TcpListener::bind(bind).await?;
+
+    axum::serve(listener, app).await?;
+
+    Ok(())
+}
+
+// For axum's state stuff
+#[derive(Clone)]
+struct WorkerId(pub String);
+
+pub fn app(liveness: HealthRegistry, worker_id: String) -> Router {
+    Router::new()
+        .route("/", get(index))
+        .route("/_readiness", get(index))
+        .route("/_liveness", get(move || ready(liveness.get_status())))
+        .with_state(WorkerId(worker_id))
+}
+
+async fn index(State(worker_id): State<WorkerId>) -> String {
+    format!("cyclotron fetch worker {}", worker_id.0)
+}
+
+async fn worker_loop(context: AppContext) -> Result<(), FetchError> {
+    let context = Arc::new(context);
+    loop {
+        context.liveness.report_healthy().await;
+        let started = tick(context.clone()).await?;
+        info!("started {} jobs", started);
+        // This will happen if 1) there are no jobs or 2) we have no capacity to start new jobs. Either way, we should sleep for a bit
+        if started == 0 {
+            tokio::time::sleep(context.config.job_poll_interval.to_std().unwrap()).await;
+        }
+    }
+}
+
+#[tokio::main]
+async fn main() {
+    let config = Config::init_from_env().expect("failed to load configuration from env");
+    tracing_subscriber::fmt::init();
+
+    let liveness = HealthRegistry::new("liveness");
+
+    let (app_config, pool_config) = config.to_components();
+    let bind = format!("{}:{}", app_config.host, app_config.port);
+
+    info!(
+        "Fetch worker starting with ID {:?}, listening at {}",
+        app_config.worker_id, bind
+    );
+
+    let worker_liveness = liveness
+        .register(
+            "worker".to_string(),
+            (app_config.job_poll_interval * 4).to_std().unwrap(),
+        )
+        .await;
+
+    let app = setup_metrics_routes(app(liveness, app_config.worker_id.clone()));
+
+    let context = AppContext::create(app_config, pool_config, worker_liveness)
+        .await
+        .expect("failed to create app context");
+
+    context.worker.run_migrations().await;
+
+    let http_server = tokio::spawn(listen(app, bind));
+
+    let worker_loop = tokio::spawn(worker_loop(context));
+
+    tokio::select!
{ + res = worker_loop => { + error!("janitor loop exited"); + if let Err(e) = res { + error!("janitor failed with: {}", e) + } + } + res = http_server => { + error!("http server exited"); + if let Err(e) = res { + error!("server failed with: {}", e) + } + } + } + + info!("exiting"); +} diff --git a/rust/cyclotron-fetch/src/metrics_constants.rs b/rust/cyclotron-fetch/src/metrics_constants.rs new file mode 100644 index 0000000000000..8ca24d80ee160 --- /dev/null +++ b/rust/cyclotron-fetch/src/metrics_constants.rs @@ -0,0 +1,18 @@ +// Metric names +pub const WORKER_SAT: &str = "cyclotron_fetch_worker_available_permits"; +pub const WORKER_DEQUEUED: &str = "cyclotron_fetch_worker_dequeued_jobs"; +pub const DEQUEUE_TIME: &str = "cyclotron_fetch_dequeue_ms"; +pub const SPAWN_TIME: &str = "cyclotron_fetch_spawn_tasks_ms"; +pub const JOB_TOTAL_TIME: &str = "cyclotron_fetch_job_total_run_ms"; +pub const JOB_INITIAL_REQUEST_TIME: &str = "cyclotron_fetch_job_initial_request_ms"; +pub const BODY_FETCH_TIME: &str = "cyclotron_fetch_body_fetch_ms"; +pub const FETCH_JOB_ERRORS: &str = "cyclotron_fetch_job_errors"; +pub const FETCH_JOBS_COMPLETED: &str = "cyclotron_fetch_jobs_completed"; +pub const FETCH_DEAD_LETTER: &str = "cyclotron_fetch_dead_letter"; +pub const RESPONSE_RECEIVED: &str = "cyclotron_fetch_got_response"; +pub const BODY_FETCH_FAILED: &str = "cyclotron_fetch_body_fetch_failed"; +pub const BODY_FETCH_SUCCEEDED: &str = "cyclotron_fetch_body_fetch_succeeded"; + +// Label keys +pub const OUTCOME_LABEL: &str = "outcome"; +pub const RESPONSE_STATUS_LABEL: &str = "response_status"; diff --git a/rust/cyclotron-fetch/tests/fetch.rs b/rust/cyclotron-fetch/tests/fetch.rs new file mode 100644 index 0000000000000..42657837112ad --- /dev/null +++ b/rust/cyclotron-fetch/tests/fetch.rs @@ -0,0 +1,312 @@ +use std::{collections::HashMap, sync::Arc}; + +use chrono::Duration; +use cyclotron_core::{QueueManager, Worker}; +use cyclotron_fetch::fetch::{tick, FetchResult, HttpMethod}; +use httpmock::{Method, MockServer}; +use serde_json::json; +use sqlx::PgPool; +use utils::{ + construct_job, construct_params, get_app_test_context, make_immediately_available, + wait_on_no_running, wait_on_return, +}; + +mod utils; + +#[sqlx::test(migrations = "../cyclotron-core/migrations")] +pub async fn test_run_migrations(db: PgPool) { + // This is a no-op, since the db sqlx::test gives use already has the migrations run, but it asserts that the migrations + // being run repeatedly doesn't cause any issues, and that the migrations being run are the same as the ones in the core + let context = get_app_test_context(db).await; + context.worker.run_migrations().await; +} + +#[sqlx::test(migrations = "../cyclotron-core/migrations")] +pub async fn test_completes_fetch(db: PgPool) { + let context = Arc::new(get_app_test_context(db.clone()).await); + let producer = QueueManager::from_pool(db.clone()); + let return_worker = Worker::from_pool(db.clone()); + let server = MockServer::start(); + + let mock = server.mock(|when, then| { + when.method(Method::GET).path("/test"); + then.status(200).body("Hello, world!"); + }); + + let params = construct_params(server.url("/test"), HttpMethod::Get); + let job = construct_job(params, None); + producer.create_job(job).await.unwrap(); + + let started = tick(context).await.unwrap(); + + assert_eq!(started, 1); + + let returned = wait_on_return(&return_worker, 1, false).await.unwrap(); + + let response: FetchResult = + serde_json::from_slice(returned[0].parameters.as_ref().unwrap()).unwrap(); + + let 
FetchResult::Success { response } = response else { + panic!("Expected success response"); + }; + + let body = String::from_utf8(returned[0].blob.clone().unwrap()).unwrap(); + + assert_eq!(response.status, 200); + assert_eq!(body, "Hello, world!"); + + mock.assert_hits(1); +} + +#[sqlx::test(migrations = "../cyclotron-core/migrations")] +pub async fn test_returns_failure_after_retries(db: PgPool) { + let context = Arc::new(get_app_test_context(db.clone()).await); + let producer = QueueManager::from_pool(db.clone()); + let return_worker = Worker::from_pool(db.clone()); + let server = MockServer::start(); + + let mock = server.mock(|when, then| { + when.method(Method::GET).path("/test"); + then.status(500).body("test server error body"); + }); + + let mut params = construct_params(server.url("/test"), HttpMethod::Get); + params.max_tries = Some(2); + + let job = construct_job(params, None); + producer.create_job(job).await.unwrap(); + + // Tick twice for retry + let started = tick(context.clone()).await.unwrap(); + assert_eq!(started, 1); + wait_on_no_running(&db, Duration::milliseconds(500)).await; + make_immediately_available(&db).await; + let started = tick(context.clone()).await.unwrap(); + assert_eq!(started, 1); + wait_on_no_running(&db, Duration::milliseconds(500)).await; + + let returned = wait_on_return(&return_worker, 1, false).await.unwrap(); + + let response: FetchResult = + serde_json::from_slice(returned[0].parameters.as_ref().unwrap()).unwrap(); + + let FetchResult::Failure { trace } = response else { + panic!("Expected failure response"); + }; + + assert!(trace.len() == 2); + for attempt in trace { + assert_eq!(attempt.status, Some(500)); + } + + mock.assert_hits(2); +} + +#[sqlx::test(migrations = "../cyclotron-core/migrations")] +pub fn fetch_discards_bad_metadata(db: PgPool) { + let context = Arc::new(get_app_test_context(db.clone()).await); + let producer = QueueManager::from_pool(db.clone()); + let return_worker = Worker::from_pool(db.clone()); + let server = MockServer::start(); + + let mock = server.mock(|when, then| { + when.method(Method::GET).path("/test"); + then.status(200).body("Hello, world!"); + }); + + let params = construct_params(server.url("/test"), HttpMethod::Get); + let mut job = construct_job(params, None); + job.metadata = Some("bad json".as_bytes().to_owned()); + producer.create_job(job).await.unwrap(); + + let started = tick(context).await.unwrap(); + + assert_eq!(started, 1); + + let returned = wait_on_return(&return_worker, 1, false).await.unwrap(); + + let response: FetchResult = + serde_json::from_slice(returned[0].parameters.as_ref().unwrap()).unwrap(); + + let FetchResult::Success { response } = response else { + panic!("Expected success response"); + }; + + let body = String::from_utf8(returned[0].blob.clone().unwrap()).unwrap(); + + assert_eq!(response.status, 200); + assert_eq!(body, "Hello, world!"); + + mock.assert_hits(1); +} + +#[sqlx::test(migrations = "../cyclotron-core/migrations")] +pub fn fetch_with_minimum_params_works(db: PgPool) { + let context = Arc::new(get_app_test_context(db.clone()).await); + let producer = QueueManager::from_pool(db.clone()); + let return_worker = Worker::from_pool(db.clone()); + let server = MockServer::start(); + + let mock = server.mock(|when, then| { + when.method(Method::GET).path("/test"); + then.status(200).body("Hello, world!"); + }); + + let params = construct_params(server.url("/test"), HttpMethod::Get); + let mut job = construct_job(params, None); + + let url = server.url("/test"); + let 
manual_params = json!({ + "url": url, + "method": "GET", + "return_queue": "return", + }) + .to_string(); + + job.parameters = Some(manual_params.as_bytes().to_owned()); + + producer.create_job(job).await.unwrap(); + + let started = tick(context).await.unwrap(); + + assert_eq!(started, 1); + + let returned = wait_on_return(&return_worker, 1, false).await.unwrap(); + + let response: FetchResult = + serde_json::from_slice(returned[0].parameters.as_ref().unwrap()).unwrap(); + + let FetchResult::Success { response } = response else { + panic!("Expected success response"); + }; + + let body = String::from_utf8(returned[0].blob.clone().unwrap()).unwrap(); + + assert_eq!(response.status, 200); + assert_eq!(body, "Hello, world!"); + + mock.assert_hits(1); +} + +#[sqlx::test(migrations = "../cyclotron-core/migrations")] +pub async fn test_completes_fetch_with_headers(db: PgPool) { + let context = Arc::new(get_app_test_context(db.clone()).await); + let producer = QueueManager::from_pool(db.clone()); + let return_worker = Worker::from_pool(db.clone()); + let server = MockServer::start(); + + let mock = server.mock(|when, then| { + when.method(Method::GET) + .path("/test") + .header("X-Test", "test"); + then.status(200).body("Hello, world!"); + }); + + let mut params = construct_params(server.url("/test"), HttpMethod::Get); + let mut headers = HashMap::new(); + headers.insert("X-Test".to_string(), "test".to_string()); + params.headers = Some(headers); + + let job = construct_job(params, None); + producer.create_job(job).await.unwrap(); + + let started = tick(context).await.unwrap(); + + assert_eq!(started, 1); + + let returned = wait_on_return(&return_worker, 1, false).await.unwrap(); + + let response: FetchResult = + serde_json::from_slice(returned[0].parameters.as_ref().unwrap()).unwrap(); + + let FetchResult::Success { response } = response else { + panic!("Expected success response"); + }; + + let body = String::from_utf8(returned[0].blob.clone().unwrap()).unwrap(); + + assert_eq!(response.status, 200); + assert_eq!(body, "Hello, world!"); + + mock.assert_hits(1); +} + +#[sqlx::test(migrations = "../cyclotron-core/migrations")] +pub async fn test_completes_fetch_with_body(db: PgPool) { + let context = Arc::new(get_app_test_context(db.clone()).await); + let producer = QueueManager::from_pool(db.clone()); + let return_worker = Worker::from_pool(db.clone()); + let server = MockServer::start(); + + let mock = server.mock(|when, then| { + when.method(Method::POST).path("/test").body("test body"); + then.status(200).body("Hello, world!"); + }); + + let params = construct_params(server.url("/test"), HttpMethod::Post); + + let job = construct_job(params, Some("test body".to_string().into())); + producer.create_job(job).await.unwrap(); + + let started = tick(context).await.unwrap(); + + assert_eq!(started, 1); + + let returned = wait_on_return(&return_worker, 1, false).await.unwrap(); + + let response: FetchResult = + serde_json::from_slice(returned[0].parameters.as_ref().unwrap()).unwrap(); + + let FetchResult::Success { response } = response else { + panic!("Expected success response"); + }; + + let body = String::from_utf8(returned[0].blob.clone().unwrap()).unwrap(); + + assert_eq!(response.status, 200); + assert_eq!(body, "Hello, world!"); + + mock.assert_hits(1); +} + +#[sqlx::test(migrations = "../cyclotron-core/migrations")] +pub async fn test_completes_fetch_with_vm_state(db: PgPool) { + let context = Arc::new(get_app_test_context(db.clone()).await); + let producer = 
QueueManager::from_pool(db.clone()); + let return_worker = Worker::from_pool(db.clone()); + let server = MockServer::start(); + + let mock = server.mock(|when, then| { + when.method(Method::GET).path("/test"); + then.status(200).body("Hello, world!"); + }); + + let params = construct_params(server.url("/test"), HttpMethod::Get); + let mut job = construct_job(params, None); + job.vm_state = Some(json!({"test": "state"}).to_string().into_bytes()); + producer.create_job(job).await.unwrap(); + + let started = tick(context).await.unwrap(); + + assert_eq!(started, 1); + + let returned = wait_on_return(&return_worker, 1, true).await.unwrap(); + + let state: serde_json::Value = + serde_json::from_slice(returned[0].vm_state.as_ref().unwrap()).unwrap(); + assert_eq!(state, json!({"test": "state"})); + + let response: FetchResult = + serde_json::from_slice(returned[0].parameters.as_ref().unwrap()).unwrap(); + + let FetchResult::Success { response } = response else { + panic!("Expected success response"); + }; + + let body = String::from_utf8(returned[0].blob.clone().unwrap()).unwrap(); + + assert_eq!(response.status, 200); + assert_eq!(body, "Hello, world!"); + + mock.assert_hits(1); +} diff --git a/rust/cyclotron-fetch/tests/utils.rs b/rust/cyclotron-fetch/tests/utils.rs new file mode 100644 index 0000000000000..6041a491d3f9b --- /dev/null +++ b/rust/cyclotron-fetch/tests/utils.rs @@ -0,0 +1,126 @@ +use std::sync::Arc; + +use chrono::{Duration, Utc}; + +use cyclotron_core::{Bytes, Job, JobInit, QueueError, Worker}; +use cyclotron_fetch::{ + config::AppConfig, + context::AppContext, + fetch::{FetchParameters, HttpMethod}, +}; +use sqlx::PgPool; +use tokio::sync::Semaphore; + +const FETCH_QUEUE: &str = "fetch"; +const RETURN_QUEUE: &str = "return"; + +pub async fn get_app_test_context(db: PgPool) -> AppContext { + let worker = Worker::from_pool(db.clone()); + let client = reqwest::Client::new(); + let concurrency_limit = Arc::new(Semaphore::new(1)); + let health = health::HealthRegistry::new("test"); + let liveness = health + .register("test".to_string(), Duration::seconds(30).to_std().unwrap()) + .await; + + let config = AppConfig { + fetch_timeout: Duration::seconds(10), + concurrent_requests_limit: 1, + host: "localhost".to_string(), + port: 16, + worker_id: "test".to_string(), + shard_id: "test".to_string(), + job_poll_interval: Duration::seconds(10), + max_retry_attempts: 3, + queue_served: FETCH_QUEUE.to_string(), + batch_size: 1000, + max_response_bytes: 1024 * 1024, + retry_backoff_base: Duration::milliseconds(1000), + allow_internal_ips: true, + }; + + AppContext { + worker, + client, + concurrency_limit, + liveness, + config, + metric_labels: Default::default(), + } +} + +pub fn construct_params(url: String, method: HttpMethod) -> FetchParameters { + FetchParameters { + url, + method, + return_queue: RETURN_QUEUE.to_string(), + headers: None, + max_tries: None, + on_finish: None, + } +} + +pub fn construct_job(parameters: FetchParameters, body: Option) -> JobInit { + JobInit { + team_id: 1, + queue_name: FETCH_QUEUE.to_string(), + priority: 0, + scheduled: Utc::now() - Duration::seconds(1), + function_id: None, + vm_state: None, + parameters: Some(serde_json::to_vec(¶meters).unwrap()), + blob: body, + metadata: None, + } +} + +pub async fn wait_on_return( + worker: &Worker, + count: usize, + with_vm: bool, +) -> Result, QueueError> { + let timeout = Duration::seconds(1); + let start = Utc::now(); + let mut returned = vec![]; + while start + timeout > Utc::now() { + let mut jobs = if with_vm 
{ + worker.dequeue_with_vm_state(RETURN_QUEUE, 1).await? + } else { + worker.dequeue_jobs(RETURN_QUEUE, 1).await? + }; + returned.append(&mut jobs); + if returned.len() == count { + return Ok(returned); + } + if returned.len() > count { + panic!("Too many jobs returned"); + } + } + panic!("Timeout waiting for jobs to return"); +} + +pub async fn wait_on_no_running(pool: &PgPool, max_time: Duration) { + let start = Utc::now(); + loop { + let running: i64 = + sqlx::query_scalar("SELECT COUNT(*) FROM cyclotron_jobs WHERE state = 'running'") + .fetch_one(pool) + .await + .unwrap(); + if running == 0 { + return; + } + if Utc::now() - start > max_time { + panic!("Timeout waiting for jobs to finish"); + } + } +} + +pub async fn make_immediately_available(pool: &PgPool) { + sqlx::query( + "UPDATE cyclotron_jobs SET scheduled = NOW() - INTERVAL '1 second' WHERE state = 'available'", + ) + .execute(pool) + .await + .unwrap(); +} diff --git a/rust/cyclotron-janitor/Cargo.toml b/rust/cyclotron-janitor/Cargo.toml new file mode 100644 index 0000000000000..3363a16aac4a6 --- /dev/null +++ b/rust/cyclotron-janitor/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "cyclotron-janitor" +version = "0.1.0" +edition = "2021" + +[lints] +workspace = true + +[dependencies] +tracing-subscriber = { workspace = true } +chrono = { workspace = true } +tokio = { workspace = true } +tracing = { workspace = true } +uuid = { workspace = true } +envconfig = { workspace = true } +axum = { workspace = true } +eyre = { workspace = true } +cyclotron-core = { path = "../cyclotron-core" } +common-metrics = { path = "../common/metrics" } +health = { path = "../common/health" } + +[dev-dependencies] +sqlx = { workspace = true } \ No newline at end of file diff --git a/rust/cyclotron-janitor/bin/entrypoint.sh b/rust/cyclotron-janitor/bin/entrypoint.sh new file mode 100755 index 0000000000000..afbe62cd468db --- /dev/null +++ b/rust/cyclotron-janitor/bin/entrypoint.sh @@ -0,0 +1,23 @@ +#!/bin/bash +set -e + +# I set all possible env vars here, tune them as you like +export RUST_LOG="INFO" +export HOST="::" +export PORT="3302" +export DATABASE_URL="postgres://posthog:posthog@localhost:5432/cyclotron" +export CLEANUP_INTERVAL_SECONDS="10" +export PG_MAX_CONNECTIONS="10" +export PG_MIN_CONNECTIONS="1" +export PG_ACQUIRE_TIMEOUT_SECONDS="5" +export PG_MAX_LIFETIME_SECONDS="300" +export PG_IDLE_TIMEOUT_SECONDS="60" +export JANITOR_ID="test-janitor" +export JANITOR_MAX_TOUCHES="2" +export JANITOR_STALL_TIMEOUT_SECONDS="30" + +# Uncomment this to have the database be reset every time you start the janitor +sqlx database reset -y --source ../cyclotron-core/migrations +sqlx migrate run --source ../cyclotron-core/migrations + +cargo run --release \ No newline at end of file diff --git a/rust/cyclotron-janitor/src/config.rs b/rust/cyclotron-janitor/src/config.rs new file mode 100644 index 0000000000000..40ab9ee558a52 --- /dev/null +++ b/rust/cyclotron-janitor/src/config.rs @@ -0,0 +1,91 @@ +use chrono::Duration; + +use cyclotron_core::PoolConfig; +use envconfig::Envconfig; +use uuid::Uuid; + +#[derive(Envconfig)] +pub struct Config { + #[envconfig(from = "BIND_HOST", default = "::")] + pub host: String, + + #[envconfig(from = "BIND_PORT", default = "3303")] + pub port: u16, + + #[envconfig(default = "postgres://posthog:posthog@localhost:5432/cyclotron")] + pub database_url: String, + + #[envconfig(default = "30")] + pub cleanup_interval_secs: u64, + + #[envconfig(default = "10")] + pub pg_max_connections: u32, + + #[envconfig(default = "1")] + pub 
pg_min_connections: u32, + + #[envconfig(default = "30")] + pub pg_acquire_timeout_seconds: u64, + + #[envconfig(default = "300")] + pub pg_max_lifetime_seconds: u64, + + #[envconfig(default = "60")] + pub pg_idle_timeout_seconds: u64, + + // Generally, this should be equivalent to a "shard id", as only one janitor should be running + // per shard + #[envconfig(default = "default_janitor_id")] + pub janitor_id: String, + + #[envconfig(default = "default")] + pub shard_id: String, // A fixed shard-id. When a janitor starts up, it will write this to the shard metadata, and workers may use it when reporting metrics + + #[envconfig(default = "10")] + pub janitor_max_touches: i16, + + #[envconfig(default = "60")] + pub janitor_stall_timeout_seconds: u16, +} + +#[allow(dead_code)] +fn default_worker_id() -> String { + Uuid::now_v7().to_string() +} + +impl Config { + pub fn get_janitor_config(&self) -> JanitorConfig { + let pool_config = PoolConfig { + db_url: self.database_url.clone(), + max_connections: Some(self.pg_max_connections), + min_connections: Some(self.pg_min_connections), + acquire_timeout_seconds: Some(self.pg_acquire_timeout_seconds), + max_lifetime_seconds: Some(self.pg_max_lifetime_seconds), + idle_timeout_seconds: Some(self.pg_idle_timeout_seconds), + }; + + let settings = JanitorSettings { + stall_timeout: Duration::seconds(self.janitor_stall_timeout_seconds as i64), + max_touches: self.janitor_max_touches, + id: self.janitor_id.clone(), + shard_id: self.shard_id.clone(), + }; + + JanitorConfig { + pool: pool_config, + settings, + } + } +} + +pub struct JanitorConfig { + pub pool: PoolConfig, + pub settings: JanitorSettings, +} + +pub struct JanitorSettings { + pub stall_timeout: Duration, + pub max_touches: i16, + pub id: String, + pub shard_id: String, +} diff --git a/rust/cyclotron-janitor/src/janitor.rs b/rust/cyclotron-janitor/src/janitor.rs new file mode 100644 index 0000000000000..be36c07ec009d --- /dev/null +++ b/rust/cyclotron-janitor/src/janitor.rs @@ -0,0 +1,107 @@ +use cyclotron_core::{QueueError, SHARD_ID_KEY}; +use tracing::{info, warn}; + +use crate::{ + config::{JanitorConfig, JanitorSettings}, + metrics_constants::*, +}; + +// The janitor reports its own metrics; this is mostly for testing purposes +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct CleanupResult { + pub completed: u64, + pub failed: u64, + pub poisoned: u64, + pub stalled: u64, +} + +pub struct Janitor { + pub inner: cyclotron_core::Janitor, + pub settings: JanitorSettings, + pub metrics_labels: Vec<(String, String)>, +} + +impl Janitor { + pub async fn new(config: JanitorConfig) -> Result<Self, QueueError> { + let settings = config.settings; + let inner = cyclotron_core::Janitor::new(config.pool).await?; + + let metrics_labels = vec![ + ("janitor_id".to_string(), settings.id.clone()), + (SHARD_ID_KEY.to_string(), settings.shard_id.clone()), + ]; + + Ok(Self { + inner, + settings, + metrics_labels, + }) + } + + pub async fn run_migrations(&self) { + self.inner.run_migrations().await; + } + + pub async fn run_once(&self) -> Result<CleanupResult, QueueError> { + info!("Running janitor loop"); + let _loop_start = common_metrics::timing_guard(RUN_TIME, &self.metrics_labels); + common_metrics::inc(RUN_STARTS, &self.metrics_labels, 1); + + let completed = { + let _time = common_metrics::timing_guard(COMPLETED_TIME, &self.metrics_labels); + self.inner.delete_completed_jobs().await?
+ }; + common_metrics::inc(COMPLETED_COUNT, &self.metrics_labels, completed); + + let failed = { + let _time = common_metrics::timing_guard(FAILED_TIME, &self.metrics_labels); + self.inner.delete_failed_jobs().await? + }; + common_metrics::inc(FAILED_COUNT, &self.metrics_labels, failed); + + let poisoned = { + let _time = common_metrics::timing_guard(POISONED_TIME, &self.metrics_labels); + self.inner + .delete_poison_pills(self.settings.stall_timeout, self.settings.max_touches) + .await? + }; + common_metrics::inc(POISONED_COUNT, &self.metrics_labels, poisoned); + + if poisoned > 0 { + warn!("Deleted {} poison pills", poisoned); + } + + let stalled = { + let _time = common_metrics::timing_guard(STALLED_TIME, &self.metrics_labels); + self.inner + .reset_stalled_jobs(self.settings.stall_timeout) + .await? + }; + common_metrics::inc(STALLED_COUNT, &self.metrics_labels, stalled); + + if stalled > 0 { + warn!("Reset {} stalled jobs", stalled); + } + + let available = { + let _time = common_metrics::timing_guard(AVAILABLE_DEPTH_TIME, &self.metrics_labels); + self.inner.waiting_jobs().await? + }; + common_metrics::gauge(AVAILABLE_DEPTH, &self.metrics_labels, available as f64); + + let dlq_depth = { + let _time = common_metrics::timing_guard(DLQ_DEPTH_TIME, &self.metrics_labels); + self.inner.count_dlq_depth().await? + }; + common_metrics::gauge(DLQ_DEPTH, &self.metrics_labels, dlq_depth as f64); + + common_metrics::inc(RUN_ENDS, &self.metrics_labels, 1); + info!("Janitor loop complete"); + Ok(CleanupResult { + completed, + failed, + poisoned, + stalled, + }) + } +} diff --git a/rust/cyclotron-janitor/src/lib.rs b/rust/cyclotron-janitor/src/lib.rs new file mode 100644 index 0000000000000..6952fea2ec3c9 --- /dev/null +++ b/rust/cyclotron-janitor/src/lib.rs @@ -0,0 +1,3 @@ +pub mod config; +pub mod janitor; +pub mod metrics_constants; diff --git a/rust/cyclotron-janitor/src/main.rs b/rust/cyclotron-janitor/src/main.rs new file mode 100644 index 0000000000000..0db35e52b8bc5 --- /dev/null +++ b/rust/cyclotron-janitor/src/main.rs @@ -0,0 +1,107 @@ +use axum::{extract::State, routing::get, Router}; +use common_metrics::setup_metrics_routes; +use cyclotron_janitor::{config::Config, janitor::Janitor}; +use envconfig::Envconfig; +use eyre::Result; +use health::{HealthHandle, HealthRegistry}; +use std::{future::ready, time::Duration}; +use tracing::{error, info}; + +/// Most of this stuff is stolen pretty shamelessly from the rustyhook janitor. It'll diverge more +/// once we introduce the management command stuff, but for now it's a good starting point. 
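+/// The binary has two halves: a cleanup loop that calls `Janitor::run_once` on a fixed interval,
+/// and a small axum server exposing `/_readiness` and `/_liveness` (plus the metrics routes added
+/// by `setup_metrics_routes`). Liveness is only reported after a successful cleanup pass, and the
+/// handle is registered with a deadline of four cleanup intervals, so repeated failures will
+/// eventually show up as an unhealthy check.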
+ +async fn cleanup_loop(janitor: Janitor, liveness: HealthHandle, interval_secs: u64) -> Result<()> { + let mut interval = tokio::time::interval(Duration::from_secs(interval_secs)); + + loop { + interval.tick().await; + + if let Err(e) = janitor.run_once().await { + // don't bother reporting unhealthy - a few times around this loop will put us in a stalled state + error!("janitor failed cleanup with: {}", e); + } else { + liveness.report_healthy().await; + } + } +} + +async fn listen(app: Router, bind: String) -> Result<()> { + let listener = tokio::net::TcpListener::bind(bind).await?; + + axum::serve(listener, app).await?; + + Ok(()) +} + +// For axum's state stuff +#[derive(Clone)] +struct JanitorId(pub String); + +pub fn app(liveness: HealthRegistry, janitor_id: String) -> Router { + Router::new() + .route("/", get(index)) + .route("/_readiness", get(index)) + .route("/_liveness", get(move || ready(liveness.get_status()))) + .with_state(JanitorId(janitor_id)) +} + +async fn index(State(janitor_id): State<JanitorId>) -> String { + format!("cyclotron janitor {}", janitor_id.0) +} + +#[tokio::main] +async fn main() { + let config = Config::init_from_env().expect("failed to load configuration from env"); + tracing_subscriber::fmt::init(); + + let liveness = HealthRegistry::new("liveness"); + + let janitor_config = config.get_janitor_config(); + + let janitor_id = janitor_config.settings.id.clone(); + let bind = format!("{}:{}", config.host, config.port); + + info!( + "Starting janitor with ID {:?}, listening at {}", + janitor_id, bind + ); + + let janitor = Janitor::new(janitor_config) + .await + .expect("failed to create janitor"); + + janitor.run_migrations().await; + + let janitor_liveness = liveness + .register( + "janitor".to_string(), + Duration::from_secs(config.cleanup_interval_secs * 4), + ) + .await; + + let janitor_loop = tokio::spawn(cleanup_loop( + janitor, + janitor_liveness, + config.cleanup_interval_secs, + )); + + let app = setup_metrics_routes(app(liveness, janitor_id)); + let http_server = tokio::spawn(listen(app, bind)); + + tokio::select!
{ + res = janitor_loop => { + error!("janitor loop exited"); + if let Err(e) = res { + error!("janitor failed with: {}", e) + } + } + res = http_server => { + error!("http server exited"); + if let Err(e) = res { + error!("server failed with: {}", e) + } + } + } + + info!("exiting"); +} diff --git a/rust/cyclotron-janitor/src/metrics_constants.rs b/rust/cyclotron-janitor/src/metrics_constants.rs new file mode 100644 index 0000000000000..2da1822484ee5 --- /dev/null +++ b/rust/cyclotron-janitor/src/metrics_constants.rs @@ -0,0 +1,21 @@ +pub const RUN_STARTS: &str = "cyclotron_janitor_run_starts"; +pub const RUN_TIME: &str = "cyclotron_janitor_total_run_ms"; +pub const RUN_ENDS: &str = "cyclotron_janitor_run_ends"; + +pub const COMPLETED_COUNT: &str = "cyclotron_janitor_completed_jobs"; +pub const COMPLETED_TIME: &str = "cyclotron_janitor_completed_jobs_cleanup_ms"; + +pub const FAILED_COUNT: &str = "cyclotron_janitor_failed_jobs"; +pub const FAILED_TIME: &str = "cyclotron_janitor_failed_jobs_cleanup_ms"; + +pub const POISONED_COUNT: &str = "cyclotron_janitor_poison_pills"; +pub const POISONED_TIME: &str = "cyclotron_janitor_poison_pills_cleanup_ms"; + +pub const STALLED_COUNT: &str = "cyclotron_janitor_stalled_jobs_reset"; +pub const STALLED_TIME: &str = "cyclotron_janitor_stalled_jobs_reset_ms"; + +// The janitor should report some basic shard-level metrics +pub const AVAILABLE_DEPTH: &str = "cyclotron_available_jobs"; +pub const AVAILABLE_DEPTH_TIME: &str = "cyclotron_available_jobs_ms"; +pub const DLQ_DEPTH: &str = "cyclotron_dead_letter_queue_depth"; +pub const DLQ_DEPTH_TIME: &str = "cyclotron_dead_letter_queue_depth_ms"; diff --git a/rust/cyclotron-janitor/tests/janitor.rs b/rust/cyclotron-janitor/tests/janitor.rs new file mode 100644 index 0000000000000..32846d7f8c647 --- /dev/null +++ b/rust/cyclotron-janitor/tests/janitor.rs @@ -0,0 +1,229 @@ +use chrono::{Duration, Utc}; + +use cyclotron_core::{JobInit, JobState, QueueManager, Worker}; +use cyclotron_janitor::{config::JanitorSettings, janitor::Janitor}; +use sqlx::PgPool; +use uuid::Uuid; + +#[sqlx::test(migrations = "../cyclotron-core/migrations")] +async fn janitor_test(db: PgPool) { + let worker = Worker::from_pool(db.clone()); + let manager = QueueManager::from_pool(db.clone()); + + // Purposefully MUCH smaller than would be used in production, so + // we can simulate stalled or poison jobs quickly + let stall_timeout = Duration::milliseconds(10); + let max_touches = 3; + + let settings = JanitorSettings { + stall_timeout, + max_touches, + id: "test_janitor".to_string(), + shard_id: "test_shard".to_string(), + }; + let janitor = Janitor { + inner: cyclotron_core::Janitor::from_pool(db.clone()), + settings, + metrics_labels: vec![], + }; + + let now = Utc::now() - Duration::seconds(10); + let queue_name = "default".to_string(); + + let job_init = JobInit { + team_id: 1, + queue_name: queue_name.clone(), + priority: 0, + scheduled: now, + function_id: Some(Uuid::now_v7()), + vm_state: None, + parameters: None, + blob: None, + metadata: None, + }; + + // First test - if we mark a job as completed, the janitor will clean it up + manager.create_job(job_init.clone()).await.unwrap(); + let job = worker + .dequeue_jobs(&queue_name, 1) + .await + .unwrap() + .pop() + .unwrap(); + + worker.set_state(job.id, JobState::Completed).unwrap(); + worker.flush_job(job.id).await.unwrap(); + + let result = janitor.run_once().await.unwrap(); + assert_eq!(result.completed, 1); + assert_eq!(result.failed, 0); + assert_eq!(result.poisoned, 0); + 
assert_eq!(result.stalled, 0); + + // Second test - if we mark a job as failed, the janitor will clean it up + manager.create_job(job_init.clone()).await.unwrap(); + let job = worker + .dequeue_jobs(&queue_name, 1) + .await + .unwrap() + .pop() + .unwrap(); + + worker.set_state(job.id, JobState::Failed).unwrap(); + worker.flush_job(job.id).await.unwrap(); + + let result = janitor.run_once().await.unwrap(); + assert_eq!(result.completed, 0); + assert_eq!(result.failed, 1); + assert_eq!(result.poisoned, 0); + assert_eq!(result.stalled, 0); + + // Third test - if we pick up a job, and then hold it for longer than + // the stall timeout, the janitor will reset it. After this, the worker + // cannot flush updates to the job, and must re-dequeue it. + + manager.create_job(job_init.clone()).await.unwrap(); + let job = worker + .dequeue_jobs(&queue_name, 1) + .await + .unwrap() + .pop() + .unwrap(); + + // First, cleanup won't do anything + let result = janitor.run_once().await.unwrap(); + assert_eq!(result.completed, 0); + assert_eq!(result.failed, 0); + assert_eq!(result.poisoned, 0); + assert_eq!(result.stalled, 0); + + // Then we stall on the job + tokio::time::sleep(stall_timeout.to_std().unwrap() * 2).await; + + // Now, cleanup will reset the job + let result = janitor.run_once().await.unwrap(); + assert_eq!(result.completed, 0); + assert_eq!(result.failed, 0); + assert_eq!(result.poisoned, 0); + assert_eq!(result.stalled, 1); + + // Now, the worker can't flush the job + worker.set_state(job.id, JobState::Completed).unwrap(); + let result = worker.flush_job(job.id).await; + assert!(result.is_err()); + + // But if we re-dequeue the job, we can flush it + let job = worker + .dequeue_jobs(&queue_name, 1) + .await + .unwrap() + .pop() + .unwrap(); + worker.set_state(job.id, JobState::Completed).unwrap(); + worker.flush_job(job.id).await.unwrap(); + + janitor.run_once().await.unwrap(); // Clean up the completed job to reset for the next test + + // Fourth test - if a worker holds a job for longer than the stall + // time, but calls heartbeat, the job will not be reset + + manager.create_job(job_init.clone()).await.unwrap(); + let job = worker + .dequeue_jobs(&queue_name, 1) + .await + .unwrap() + .pop() + .unwrap(); + + let start = tokio::time::Instant::now(); + loop { + worker.heartbeat(job.id).await.unwrap(); + tokio::time::sleep(Duration::milliseconds(1).to_std().unwrap()).await; + if start.elapsed() > stall_timeout.to_std().unwrap() * 2 { + break; + } + } + + let result = janitor.run_once().await.unwrap(); + assert_eq!(result.completed, 0); + assert_eq!(result.failed, 0); + assert_eq!(result.poisoned, 0); + assert_eq!(result.stalled, 0); + + // The worker can still flush the job + worker.set_state(job.id, JobState::Completed).unwrap(); + worker.flush_job(job.id).await.unwrap(); + + // and now cleanup will work + let result = janitor.run_once().await.unwrap(); + assert_eq!(result.completed, 1); + assert_eq!(result.failed, 0); + assert_eq!(result.poisoned, 0); + assert_eq!(result.stalled, 0); + + // Fifth test - if a job stalls more than max_touches + // it will be marked as poisoned and deleted + + manager.create_job(job_init.clone()).await.unwrap(); + let mut job = worker + .dequeue_jobs(&queue_name, 1) + .await + .unwrap() + .pop() + .unwrap(); + + for _ in 0..max_touches { + tokio::time::sleep(stall_timeout.to_std().unwrap() * 2).await; + let result = janitor.run_once().await.unwrap(); + assert_eq!(result.completed, 0); + assert_eq!(result.failed, 0); + assert_eq!(result.poisoned, 0); + 
assert_eq!(result.stalled, 1); + + // assert we can't update the job (flush and heartbeat fail) + worker.set_state(job.id, JobState::Completed).unwrap(); + let result = worker.heartbeat(job.id).await; + assert!(result.is_err()); + let result = worker.flush_job(job.id).await; + assert!(result.is_err()); + + // re-dequeue the job + job = worker + .dequeue_jobs(&queue_name, 1) + .await + .unwrap() + .pop() + .unwrap(); + } + // At this point, the "janitor touches" on the job is 3 (it's been stalled and reset 3 times), so one more cleanup loop will delete it + + // Now stall one more time, and on cleanup, we should see the job was considered poison and deleted + tokio::time::sleep(stall_timeout.to_std().unwrap() * 2).await; + let result: cyclotron_janitor::janitor::CleanupResult = janitor.run_once().await.unwrap(); + assert_eq!(result.completed, 0); + assert_eq!(result.failed, 0); + assert_eq!(result.poisoned, 1); + assert_eq!(result.stalled, 0); + + // The worker can't flush the job + worker.set_state(job.id, JobState::Completed).unwrap(); + let result = worker.flush_job(job.id).await; + assert!(result.is_err()); + + // Sixth test - the janitor can operate on multiple jobs at once + manager.create_job(job_init.clone()).await.unwrap(); + manager.create_job(job_init.clone()).await.unwrap(); + let jobs = worker.dequeue_jobs(&queue_name, 2).await.unwrap(); + + worker.set_state(jobs[0].id, JobState::Completed).unwrap(); + worker.set_state(jobs[1].id, JobState::Failed).unwrap(); + + worker.flush_job(jobs[0].id).await.unwrap(); + worker.flush_job(jobs[1].id).await.unwrap(); + + let result = janitor.run_once().await.unwrap(); + assert_eq!(result.completed, 1); + assert_eq!(result.failed, 1); + assert_eq!(result.poisoned, 0); + assert_eq!(result.stalled, 0); +} diff --git a/rust/cyclotron-node/.gitignore b/rust/cyclotron-node/.gitignore new file mode 100644 index 0000000000000..01f3230c629f3 --- /dev/null +++ b/rust/cyclotron-node/.gitignore @@ -0,0 +1,7 @@ +target +index.node +**/node_modules +**/.DS_Store +npm-debug.log*cargo.log +cross.log +dist/ diff --git a/rust/cyclotron-node/Cargo.toml b/rust/cyclotron-node/Cargo.toml new file mode 100644 index 0000000000000..0ae89199680a0 --- /dev/null +++ b/rust/cyclotron-node/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "cyclotron-node" +version = "0.1.0" +edition = "2021" +exclude = ["index.node"] + +[lints] +workspace = true + + +[lib] +crate-type = ["cdylib"] + +[dependencies] +cyclotron-core = { path = "../cyclotron-core" } +neon = { workspace = true } +once_cell = { workspace = true } +tokio = { workspace = true } +serde_json = { workspace = true } +serde = { workspace = true } +uuid = { workspace = true } +chrono = { workspace = true } \ No newline at end of file diff --git a/rust/cyclotron-node/examples/basic.js b/rust/cyclotron-node/examples/basic.js new file mode 100644 index 0000000000000..a625dd51ca851 --- /dev/null +++ b/rust/cyclotron-node/examples/basic.js @@ -0,0 +1,146 @@ +const assert = require('assert') +const cyclotron = require('../.') +const crypto = require('crypto') + +// Set of available job states +const JOB_STATES = Object.freeze({ + AVAILABLE: 'available', + RUNNING: 'running', + FAILED: 'failed', + COMPLETED: 'completed', +}) + +const AVAILABLE_WORKERS = Object.freeze({ + FETCH: 'fetch', + HOG: 'hog', +}) + +async function main() { + let poolConfig = { + db_url: 'postgresql://posthog:posthog@localhost:5432/cyclotron', + } + + let managerConfig = { + shards: [poolConfig], + } + + // Most processes will only need to do one of 
these, but we can do both here for demonstration purposes + await cyclotron.initWorker(JSON.stringify(poolConfig)) + await cyclotron.initManager(JSON.stringify(managerConfig)) + + // Maybe inits won't throw on re-calling, and are also short-circuiting to be almost free, so safe to call frequently + // (although I still wouldn't call them in a loop) + await cyclotron.maybeInitWorker(JSON.stringify(poolConfig)) + await cyclotron.maybeInitManager(JSON.stringify(managerConfig)) + + let five_mintes_ago = new Date(new Date().getTime() - 5 * 60000).toISOString() + let queue_name = 'default' + + let job_1 = { + team_id: 1, + queue_name, + priority: 0, + scheduled: five_mintes_ago, + function_id: crypto.randomUUID(), // Is nullable + vm_state: null, + parameters: null, + blob: null, + metadata: null, + } + + let job_2 = { + team_id: 1, + queue_name, + priority: 1, + scheduled: five_mintes_ago, + function_id: crypto.randomUUID(), // Is nullable + vm_state: null, + parameters: null, + blob: null, + metadata: null, + } + + await cyclotron.createJob(JSON.stringify(job_1)) + await cyclotron.createJob(JSON.stringify(job_2)) + + // Jobs (as well as any other 'complex' data shape) are serialized across the API boundary, + // because that's (according to the neon maintainers) /actually faster/ than doing a bunch + // of cross-runtime pointer chasing. + let jobs = JSON.parse(await cyclotron.dequeueJobs(queue_name, 2)) + assert(jobs.length === 2) + assert(jobs[0].function_id === job_1.function_id) + assert(jobs[1].function_id === job_2.function_id) + + job_1 = jobs[0] + job_2 = jobs[1] + + // All of these throw if the job hasn't been dequeued by the worker created when init_worker was called, + // or if there's some serde error - generally, interacting with the cyclotron should involve try/catch in + // some far outer catch. We can iterate on this API to make it more ergonomic with time, but + // my js/ts is... rusty (co-pilot wrote this joke) + cyclotron.setState(job_1.id, JOB_STATES.AVAILABLE) + cyclotron.setState(job_2.id, JOB_STATES.AVAILABLE) + + cyclotron.setQueue(job_1.id, 'non-default') + cyclotron.setQueue(job_2.id, 'non-default') + + // Priority is lowest-first, so this means we can assert that job_2 will be returned first on subsequent dequeue_jobs + cyclotron.setPriority(job_1.id, 2) + cyclotron.setPriority(job_2.id, 1) + + let ten_minutes_ago = new Date(new Date().getTime() - 10 * 60000).toISOString() + cyclotron.setScheduledAt(job_1.id, ten_minutes_ago) + cyclotron.setScheduledAt(job_2.id, ten_minutes_ago) + + cyclotron.setVmState(job_1.id, JSON.stringify({ state: 'running' })) + cyclotron.setVmState(job_2.id, JSON.stringify({ state: 'running' })) + + cyclotron.setParameters(job_1.id, JSON.stringify({ parameters: 'running' })) + cyclotron.setParameters(job_2.id, JSON.stringify({ parameters: 'running' })) + + cyclotron.setMetadata(job_1.id, JSON.stringify({ metadata: 'running' })) + cyclotron.setMetadata(job_2.id, JSON.stringify({ metadata: 'running' })) + + // Flush the updates queued up above back to the queue. Subsequent calls to flush + // will throw if a job isn't re-acquired. 
Flushes will fail if a job state update + // isn't included (workers should not purposefully leave jobs in a running state) + await cyclotron.flushJob(job_1.id) + await cyclotron.flushJob(job_2.id) + + jobs = JSON.parse(await cyclotron.dequeueWithVmState('non-default', 2)) + + assert(jobs[0].id == job_2.id) + assert(jobs[1].id == job_1.id) + + assert(jobs[0].function_id === job_2.function_id) + assert(jobs[1].function_id === job_1.function_id) + + assert(jobs[0].team_id === job_2.team_id) + assert(jobs[1].team_id === job_1.team_id) + + assert(jobs[0].queue_name === 'non-default') + assert(jobs[1].queue_name === 'non-default') + + assert(jobs[0].priority === 1) + assert(jobs[1].priority === 2) + + assert(jobs[0].scheduled === ten_minutes_ago) + assert(jobs[1].scheduled === ten_minutes_ago) + + assert(jobs[0].vm_state === JSON.stringify({ state: 'running' })) + assert(jobs[1].vm_state === JSON.stringify({ state: 'running' })) + assert(jobs[0].parameters === JSON.stringify({ parameters: 'running' })) + assert(jobs[1].parameters === JSON.stringify({ parameters: 'running' })) + assert(jobs[0].metadata === JSON.stringify({ metadata: 'running' })) + assert(jobs[1].metadata === JSON.stringify({ metadata: 'running' })) + + // Now we'll mark these jobs as completed + cyclotron.setState(job_1.id, JOB_STATES.COMPLETED) + cyclotron.setState(job_2.id, JOB_STATES.COMPLETED) + + // And flush them back to the queue + await cyclotron.flushJob(job_1.id) + await cyclotron.flushJob(job_2.id) +} + +main() diff --git a/rust/cyclotron-node/package.json b/rust/cyclotron-node/package.json new file mode 100644 index 0000000000000..a445cae4e9206 --- /dev/null +++ b/rust/cyclotron-node/package.json @@ -0,0 +1,27 @@ +{ + "name": "@posthog/cyclotron", + "version": "0.1.0", + "description": "Node bindings for cyclotron", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "test": "cargo test", + "build": "pnpm run build:cargo --release && pnpm run build:move-lib && pnpm run build:typescript", + "build:move-lib": "cp ../target/release/libcyclotron_node.dylib index.node || cp ../target/release/libcyclotron_node.so index.node", + "build:cargo": "cargo build --message-format=json > cargo.log", + "build:cargo:debug": "pnpm run build:cargo", + "build:cross": "cross build --message-format=json > cross.log", + "build:typescript": "tsc", + "package": "NODE_ENV=development pnpm i --dev && pnpm run build" + }, + "author": "", + "license": "MIT", + "devDependencies": { + "@types/node": "^22.4.1", + "typescript": "^4.7.4" + }, + "files": [ + "dist", + "index.node" + ] +} diff --git a/rust/cyclotron-node/pnpm-lock.yaml b/rust/cyclotron-node/pnpm-lock.yaml new file mode 100644 index 0000000000000..9866808970bae --- /dev/null +++ b/rust/cyclotron-node/pnpm-lock.yaml @@ -0,0 +1,31 @@ +lockfileVersion: '6.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +devDependencies: + '@types/node': + specifier: ^22.4.1 + version: 22.4.1 + typescript: + specifier: ^4.7.4 + version: 4.9.5 + +packages: + + /@types/node@22.4.1: + resolution: {integrity: sha512-1tbpb9325+gPnKK0dMm+/LMriX0vKxf6RnB0SZUqfyVkQ4fMgUSySqhxE/y8Jvs4NyF1yHzTfG9KlnkIODxPKg==} + dependencies: + undici-types: 6.19.8 + dev: true + + /typescript@4.9.5: + resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==} + engines: {node: '>=4.2.0'} + hasBin: true + dev: true + + /undici-types@6.19.8: + resolution: {integrity: 
sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==} + dev: true diff --git a/rust/cyclotron-node/src/index.ts b/rust/cyclotron-node/src/index.ts new file mode 100644 index 0000000000000..fb8dd659d80c3 --- /dev/null +++ b/rust/cyclotron-node/src/index.ts @@ -0,0 +1,222 @@ +// eslint-disable-next-line @typescript-eslint/no-var-requires +const cyclotron = require('../index.node') + +export interface PoolConfig { + dbUrl: string + maxConnections?: number + minConnections?: number + acquireTimeoutSeconds?: number + maxLifetimeSeconds?: number + idleTimeoutSeconds?: number +} + +// Type as expected by Cyclotron. +interface InternalPoolConfig { + db_url: string + max_connections?: number + min_connections?: number + acquire_timeout_seconds?: number + max_lifetime_seconds?: number + idle_timeout_seconds?: number +} + +export interface ManagerConfig { + shards: PoolConfig[] +} + +// Type as expected by Cyclotron. +interface InternalManagerConfig { + shards: InternalPoolConfig[] +} + +export interface JobInit { + teamId: number + functionId: string + queueName: string + priority?: number + scheduled?: Date + vmState?: string + parameters?: string + blob?: Uint8Array + metadata?: string +} + +// Type as expected by Cyclotron. +interface InternalJobInit { + team_id: number + function_id: string + queue_name: string + priority?: number + scheduled?: Date + vm_state?: string + parameters?: string + metadata?: string +} + +export type JobState = 'available' | 'running' | 'completed' | 'failed' | 'paused' + +export interface Job { + id: string + teamId: number + functionId: string | null + created: Date + lockId: string | null + lastHeartbeat: Date | null + janitorTouchCount: number + transitionCount: number + lastTransition: Date + queueName: string + state: JobState + priority: number + scheduled: Date + vmState: string | null + metadata: string | null + parameters: string | null + blob: Uint8Array | null +} + +export async function initWorker(poolConfig: PoolConfig): Promise { + const initWorkerInternal: InternalPoolConfig = { + db_url: poolConfig.dbUrl, + max_connections: poolConfig.maxConnections, + min_connections: poolConfig.minConnections, + acquire_timeout_seconds: poolConfig.acquireTimeoutSeconds, + max_lifetime_seconds: poolConfig.maxLifetimeSeconds, + idle_timeout_seconds: poolConfig.idleTimeoutSeconds, + } + return await cyclotron.initWorker(JSON.stringify(initWorkerInternal)) +} + +export async function initManager(managerConfig: ManagerConfig): Promise { + const managerConfigInternal: InternalManagerConfig = { + shards: managerConfig.shards.map((shard) => ({ + db_url: shard.dbUrl, + max_connections: shard.maxConnections, + min_connections: shard.minConnections, + acquire_timeout_seconds: shard.acquireTimeoutSeconds, + max_lifetime_seconds: shard.maxLifetimeSeconds, + idle_timeout_seconds: shard.idleTimeoutSeconds, + })), + } + return await cyclotron.initManager(JSON.stringify(managerConfigInternal)) +} + +export async function maybeInitWorker(poolConfig: PoolConfig): Promise { + const initWorkerInternal: InternalPoolConfig = { + db_url: poolConfig.dbUrl, + max_connections: poolConfig.maxConnections, + min_connections: poolConfig.minConnections, + acquire_timeout_seconds: poolConfig.acquireTimeoutSeconds, + max_lifetime_seconds: poolConfig.maxLifetimeSeconds, + idle_timeout_seconds: poolConfig.idleTimeoutSeconds, + } + return await cyclotron.maybeInitWorker(JSON.stringify(initWorkerInternal)) +} + +export async function 
maybeInitManager(managerConfig: ManagerConfig): Promise { + const managerConfigInternal: InternalManagerConfig = { + shards: managerConfig.shards.map((shard) => ({ + db_url: shard.dbUrl, + max_connections: shard.maxConnections, + min_connections: shard.minConnections, + acquire_timeout_seconds: shard.acquireTimeoutSeconds, + max_lifetime_seconds: shard.maxLifetimeSeconds, + idle_timeout_seconds: shard.idleTimeoutSeconds, + })), + } + return await cyclotron.maybeInitManager(JSON.stringify(managerConfigInternal)) +} + +export async function createJob(job: JobInit): Promise { + job.priority ??= 1 + job.scheduled ??= new Date() + + const jobInitInternal: InternalJobInit = { + team_id: job.teamId, + function_id: job.functionId, + queue_name: job.queueName, + priority: job.priority, + scheduled: job.scheduled, + vm_state: job.vmState, + parameters: job.parameters, + metadata: job.metadata, + } + + const json = JSON.stringify(jobInitInternal) + return await cyclotron.createJob(json, job.blob ? job.blob.buffer : undefined) +} + +export async function dequeueJobs(queueName: string, limit: number): Promise { + return await cyclotron.dequeueJobs(queueName, limit) +} + +export async function dequeueJobsWithVmState(queueName: string, limit: number): Promise { + return await cyclotron.dequeueJobsWithVmState(queueName, limit) +} + +export async function flushJob(jobId: string): Promise { + return await cyclotron.flushJob(jobId) +} + +export function setState(jobId: string, jobState: JobState): Promise { + return cyclotron.setState(jobId, jobState) +} + +export function setQueue(jobId: string, queueName: string): Promise { + return cyclotron.setQueue(jobId, queueName) +} + +export function setPriority(jobId: string, priority: number): Promise { + return cyclotron.setPriority(jobId, priority) +} + +export function setScheduledAt(jobId: string, scheduledAt: Date): Promise { + return cyclotron.setScheduledAt(jobId, scheduledAt.toISOString()) +} + +export function serializeObject(name: string, obj: Record | null): string | null { + if (obj === null) { + return null + } else if (typeof obj === 'object' && obj !== null) { + return JSON.stringify(obj) + } + throw new Error(`${name} must be either an object or null`) +} + +export function setVmState(jobId: string, vmState: Record | null): Promise { + const serialized = serializeObject('vmState', vmState) + return cyclotron.setVmState(jobId, serialized) +} + +export function setMetadata(jobId: string, metadata: Record | null): Promise { + const serialized = serializeObject('metadata', metadata) + return cyclotron.setMetadata(jobId, serialized) +} + +export function setParameters(jobId: string, parameters: Record | null): Promise { + const serialized = serializeObject('parameters', parameters) + return cyclotron.setParameters(jobId, serialized) +} + +export function setBlob(jobId: string, blob: Uint8Array | null): Promise { + return cyclotron.setBlob(jobId, blob) +} + +export default { + initWorker, + initManager, + maybeInitWorker, + maybeInitManager, + createJob, + dequeueJobs, + dequeueJobsWithVmState, + flushJob, + setState, + setQueue, + setPriority, + setScheduledAt, + setVmState, + setMetadata, + setParameters, + setBlob, +} diff --git a/rust/cyclotron-node/src/lib.rs b/rust/cyclotron-node/src/lib.rs new file mode 100644 index 0000000000000..a9071b96de856 --- /dev/null +++ b/rust/cyclotron-node/src/lib.rs @@ -0,0 +1,631 @@ +use chrono::{DateTime, Utc}; + +use cyclotron_core::{Job, JobInit, JobState, ManagerConfig, PoolConfig, QueueManager, Worker}; +use 
neon::{ + handle::Handle, + object::Object, + prelude::{Context, FunctionContext, ModuleContext, TaskContext}, + result::{JsResult, NeonResult}, + types::{ + buffer::TypedArray, JsArray, JsArrayBuffer, JsNull, JsNumber, JsObject, JsPromise, + JsString, JsUint8Array, JsUndefined, JsValue, + }, +}; +use once_cell::sync::OnceCell; +use serde::de::DeserializeOwned; +use serde::Deserialize; +use serde_json::Value; +use tokio::runtime::Runtime; +use uuid::Uuid; + +static WORKER: OnceCell<Worker> = OnceCell::new(); +static MANAGER: OnceCell<QueueManager> = OnceCell::new(); +static RUNTIME: OnceCell<Runtime> = OnceCell::new(); + +fn runtime<'a, C: Context<'a>>(cx: &mut C) -> NeonResult<&'static Runtime> { + RUNTIME + .get_or_try_init(Runtime::new) + .or_else(|e| cx.throw_error(format!("failed to create tokio runtime: {}", e))) +} + +// The general interface for calling our functions takes a JSON serialized string, +// because neon has no nice serde support for function arguments (and generally, +// ripping objects from the v8 runtime piece by piece is slower than just passing +// a single chunk of bytes). These are convenience functions for converting between the two. +pub fn from_json_string<'a, T, C>(cx: &mut C, object: Handle<JsString>) -> NeonResult<T> +where + T: DeserializeOwned, + C: Context<'a>, +{ + let value: T = + serde_json::from_str(&object.value(cx)).or_else(|e| cx.throw_error(format!("{}", e)))?; + Ok(value) +} + +pub fn to_json_string<'a, T, C>(cx: &mut C, value: T) -> NeonResult<String> +where + T: serde::Serialize, + C: Context<'a>, +{ + let value = serde_json::to_string(&value) + .or_else(|e| cx.throw_error(format!("failed to serialize value: {}", e)))?; + Ok(value) +} + +fn hello(mut cx: FunctionContext) -> JsResult<JsString> { + let arg1 = cx.argument::<JsString>(0)?; + let value: Value = from_json_string(&mut cx, arg1)?; + let string = to_json_string(&mut cx, value)?; + Ok(cx.string(string)) +} + +fn init_worker_impl(mut cx: FunctionContext, throw_on_reinit: bool) -> JsResult<JsPromise> { + let arg1 = cx.argument::<JsString>(0)?; + let config: PoolConfig = from_json_string(&mut cx, arg1)?; + + let (deferred, promise) = cx.promise(); + let channel = cx.channel(); + let runtime = runtime(&mut cx)?; + + let fut = async move { + let worker = Worker::new(config).await; + deferred.settle_with(&channel, move |mut cx| { + if WORKER.get().is_some() && !throw_on_reinit { + return Ok(cx.null()); // Short circuit to make using maybe_init a no-op + } + let worker = worker.or_else(|e| cx.throw_error(format!("{}", e)))?; + let already_set = WORKER.set(worker).is_err(); + if already_set && throw_on_reinit { + cx.throw_error("worker already initialized") + } else { + Ok(cx.null()) + } + }); + }; + + runtime.spawn(fut); + + Ok(promise) +} + +fn init_manager_impl(mut cx: FunctionContext, throw_on_reinit: bool) -> JsResult<JsPromise> { + let arg1 = cx.argument::<JsString>(0)?; + let config: ManagerConfig = from_json_string(&mut cx, arg1)?; + + let (deferred, promise) = cx.promise(); + let channel = cx.channel(); + let runtime = runtime(&mut cx)?; + + let fut = async move { + let manager = QueueManager::new(config).await; + deferred.settle_with(&channel, move |mut cx| { + if MANAGER.get().is_some() && !throw_on_reinit { + return Ok(cx.null()); // Short circuit to make using maybe_init a no-op + } + let manager = manager.or_else(|e| cx.throw_error(format!("{}", e)))?; + let already_set = MANAGER.set(manager).is_err(); + if already_set && throw_on_reinit { + cx.throw_error("manager already initialized") + } else { + Ok(cx.null()) + } + }); + }; + + runtime.spawn(fut); + + Ok(promise) +} + +fn init_worker(cx:
FunctionContext) -> JsResult { + init_worker_impl(cx, true) +} + +fn init_manager(cx: FunctionContext) -> JsResult { + init_manager_impl(cx, true) +} + +fn maybe_init_worker(cx: FunctionContext) -> JsResult { + init_worker_impl(cx, false) +} + +fn maybe_init_manager(cx: FunctionContext) -> JsResult { + init_manager_impl(cx, false) +} + +// throw_error has a type signature that makes it inconvenient to use in closures, because +// it requires that you specify the V of the NeonResult returned, even though it's always +// an error. This is a sane thing for it to do, but it's inconvenient for us, because we +// frequently settle promises early, before we have a V to use for type inference. This little +// wrapper makes that easier, by specifying the V as JsNull +fn throw_null_err<'c, C>(cx: &mut C, msg: &str) -> NeonResult> +where + C: Context<'c>, +{ + cx.throw_error(msg) +} + +#[derive(Debug, Deserialize)] +pub struct JsJob { + pub team_id: i32, + pub queue_name: String, + pub priority: i16, + pub scheduled: DateTime, + pub function_id: Option, + pub vm_state: Option, + pub parameters: Option, + pub metadata: Option, +} + +fn create_job(mut cx: FunctionContext) -> JsResult { + let arg1: Handle = cx.argument::(0)?; + + let blob = cx.argument::(1)?; + let blob = if blob.is_a::(&mut cx) || blob.is_a::(&mut cx) { + None + } else { + Some( + blob.downcast_or_throw::(&mut cx)? + .as_slice(&cx) + .to_vec(), + ) + }; + + let js_job: JsJob = from_json_string(&mut cx, arg1)?; + + let job = JobInit { + team_id: js_job.team_id, + queue_name: js_job.queue_name, + priority: js_job.priority, + scheduled: js_job.scheduled, + function_id: js_job.function_id, + vm_state: js_job.vm_state.map(|s| s.into_bytes()), + parameters: js_job.parameters.map(|s| s.into_bytes()), + metadata: js_job.metadata.map(|s| s.into_bytes()), + blob, + }; + + let (deferred, promise) = cx.promise(); + let channel = cx.channel(); + let runtime = runtime(&mut cx)?; + + let fut = async move { + let manager = match MANAGER.get() { + Some(manager) => manager, + None => { + deferred.settle_with(&channel, |mut cx| { + throw_null_err(&mut cx, "manager not initialized") + }); + return; + } + }; + let job = manager.create_job(job).await; + deferred.settle_with(&channel, move |mut cx| { + job.or_else(|e| cx.throw_error(format!("{}", e)))?; + Ok(cx.null()) + }); + }; + + runtime.spawn(fut); + + Ok(promise) +} + +fn dequeue_jobs(mut cx: FunctionContext) -> JsResult { + let queue_name = cx.argument::(0)?.value(&mut cx); + + let limit = cx.argument::(1)?.value(&mut cx) as usize; // TODO - I don't love this cast + + let (deferred, promise) = cx.promise(); + let channel = cx.channel(); + let runtime = runtime(&mut cx)?; + + let fut = async move { + let worker = match WORKER.get() { + Some(worker) => worker, + None => { + deferred.settle_with(&channel, |mut cx| { + throw_null_err(&mut cx, "worker not initialized") + }); + return; + } + }; + let jobs = worker.dequeue_jobs(&queue_name, limit).await; + deferred.settle_with(&channel, move |mut cx| { + let jobs = jobs.or_else(|e| cx.throw_error(format!("{}", e)))?; + let jobs = jobs_to_js_array(&mut cx, jobs)?; + Ok(jobs) + }); + }; + + runtime.spawn(fut); + + Ok(promise) +} + +fn dequeue_with_vm_state(mut cx: FunctionContext) -> JsResult { + let queue_name = cx.argument::(0)?.value(&mut cx); + + let limit = cx.argument::(1)?.value(&mut cx) as usize; // TODO - I don't love this cast + + let (deferred, promise) = cx.promise(); + let channel = cx.channel(); + let runtime = runtime(&mut cx)?; + + let fut = 
async move { + let worker = match WORKER.get() { + Some(worker) => worker, + None => { + deferred.settle_with(&channel, |mut cx| { + throw_null_err(&mut cx, "worker not initialized") + }); + return; + } + }; + let jobs = worker.dequeue_with_vm_state(&queue_name, limit).await; + deferred.settle_with(&channel, move |mut cx| { + let jobs = jobs.or_else(|e| cx.throw_error(format!("{}", e)))?; + let jobs = jobs_to_js_array(&mut cx, jobs)?; + Ok(jobs) + }); + }; + + runtime.spawn(fut); + + Ok(promise) +} + +fn flush_job(mut cx: FunctionContext) -> JsResult { + let arg1 = cx.argument::(0)?.value(&mut cx); + let job_id: Uuid = arg1 + .parse() + .or_else(|_| cx.throw_error(format!("invalid job id: {}", arg1)))?; + + let (deferred, promise) = cx.promise(); + let channel = cx.channel(); + let runtime = runtime(&mut cx)?; + + let fut = async move { + let worker = match WORKER.get() { + Some(worker) => worker, + None => { + deferred.settle_with(&channel, |mut cx| { + throw_null_err(&mut cx, "worker not initialized") + }); + return; + } + }; + let res = worker.flush_job(job_id).await; + deferred.settle_with(&channel, move |mut cx| { + res.or_else(|e: cyclotron_core::QueueError| cx.throw_error(format!("{}", e)))?; + Ok(cx.null()) + }); + }; + + runtime.spawn(fut); + + Ok(promise) +} + +fn set_state(mut cx: FunctionContext) -> JsResult { + let arg = cx.argument::(0)?.value(&mut cx); + let job_id: Uuid = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid job id: {}", arg)))?; + + let arg = cx.argument::(1)?.value(&mut cx); + let state: JobState = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid job state: {}", arg)))?; + + WORKER + .get() + .map_or_else(|| cx.throw_error("worker not initialized"), Ok)? + .set_state(job_id, state) + .or_else(|e| cx.throw_error(format!("{}", e)))?; + + Ok(cx.null()) +} + +fn set_queue(mut cx: FunctionContext) -> JsResult { + let arg = cx.argument::(0)?.value(&mut cx); + let job_id: Uuid = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid job id: {}", arg)))?; + + let queue = cx.argument::(1)?.value(&mut cx); + + WORKER + .get() + .map_or_else(|| cx.throw_error("worker not initialized"), Ok)? + .set_queue(job_id, &queue) + .or_else(|e| cx.throw_error(format!("{}", e)))?; + + Ok(cx.null()) +} + +fn set_priority(mut cx: FunctionContext) -> JsResult { + let arg = cx.argument::(0)?.value(&mut cx); + let job_id: Uuid = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid job id: {}", arg)))?; + + let arg = cx.argument::(1)?.value(&mut cx); + let priority = arg as i16; // TODO - I /really/ don't love this cast + + WORKER + .get() + .map_or_else(|| cx.throw_error("worker not initialized"), Ok)? + .set_priority(job_id, priority) + .or_else(|e| cx.throw_error(format!("{}", e)))?; + + Ok(cx.null()) +} + +fn set_scheduled_at(mut cx: FunctionContext) -> JsResult { + let arg = cx.argument::(0)?.value(&mut cx); + let job_id: Uuid = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid job id: {}", arg)))?; + + let arg = cx.argument::(1)?.value(&mut cx); + let scheduled: DateTime = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid scheduled at: {}", arg)))?; + + WORKER + .get() + .map_or_else(|| cx.throw_error("worker not initialized"), Ok)? 
+ .set_scheduled_at(job_id, scheduled) + .or_else(|e| cx.throw_error(format!("{}", e)))?; + + Ok(cx.null()) +} + +fn set_vm_state(mut cx: FunctionContext) -> JsResult { + let arg = cx.argument::(0)?.value(&mut cx); + let job_id: Uuid = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid job id: {}", arg)))?; + + // Tricky - we have to support passing nulls here, because that's how you clear vm state. + let vm_state = cx.argument::(1)?; + let vm_state = + if vm_state.is_a::(&mut cx) || vm_state.is_a::(&mut cx) { + None + } else { + Some( + vm_state + .downcast_or_throw::(&mut cx)? + .value(&mut cx) + .into_bytes(), + ) + }; + + WORKER + .get() + .map_or_else(|| cx.throw_error("worker not initialized"), Ok)? + .set_vm_state(job_id, vm_state) + .or_else(|e| cx.throw_error(format!("{}", e)))?; + + Ok(cx.null()) +} + +fn set_metadata(mut cx: FunctionContext) -> JsResult { + let arg = cx.argument::(0)?.value(&mut cx); + let job_id: Uuid = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid job id: {}", arg)))?; + + // Tricky - we have to support passing nulls here, because that's how you clear metadata. + let metadata = cx.argument::(1)?; + let metadata = + if metadata.is_a::(&mut cx) || metadata.is_a::(&mut cx) { + None + } else { + Some( + metadata + .downcast_or_throw::(&mut cx)? + .value(&mut cx) + .into_bytes(), + ) + }; + + WORKER + .get() + .map_or_else(|| cx.throw_error("worker not initialized"), Ok)? + .set_metadata(job_id, metadata) + .or_else(|e| cx.throw_error(format!("{}", e)))?; + + Ok(cx.null()) +} + +fn set_parameters(mut cx: FunctionContext) -> JsResult { + let arg = cx.argument::(0)?.value(&mut cx); + let job_id: Uuid = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid job id: {}", arg)))?; + + // Tricky - we have to support passing nulls here, because that's how you clear parameters. + let parameters = cx.argument::(1)?; + let parameters = + if parameters.is_a::(&mut cx) || parameters.is_a::(&mut cx) { + None + } else { + Some( + parameters + .downcast_or_throw::(&mut cx)? + .value(&mut cx) + .into_bytes(), + ) + }; + + WORKER + .get() + .map_or_else(|| cx.throw_error("worker not initialized"), Ok)? + .set_parameters(job_id, parameters) + .or_else(|e| cx.throw_error(format!("{}", e)))?; + + Ok(cx.null()) +} + +fn set_blob(mut cx: FunctionContext) -> JsResult { + let arg = cx.argument::(0)?.value(&mut cx); + let job_id: Uuid = arg + .parse() + .or_else(|_| cx.throw_error(format!("invalid job id: {}", arg)))?; + + // Tricky - we have to support passing nulls here, because that's how you clear the blob. + let blob = cx.argument::(1)?; + let blob: Option> = + if blob.is_a::(&mut cx) || blob.is_a::(&mut cx) { + None + } else { + Some( + blob.downcast_or_throw::(&mut cx)? + .as_slice(&cx) + .to_vec(), + ) + }; + + WORKER + .get() + .map_or_else(|| cx.throw_error("worker not initialized"), Ok)? 
+ .set_blob(job_id, blob) + .or_else(|e| cx.throw_error(format!("{}", e)))?; + + Ok(cx.null()) +} + +fn jobs_to_js_array<'a>(cx: &mut TaskContext<'a>, jobs: Vec) -> JsResult<'a, JsArray> { + let js_array = JsArray::new(cx, jobs.len()); + + for (i, job) in jobs.into_iter().enumerate() { + let js_obj = JsObject::new(cx); + let null = cx.null(); + + let id_string = job.id.to_string(); + let js_id = cx.string(id_string); + js_obj.set(cx, "id", js_id)?; + + let team_id = cx.number(job.team_id as f64); + js_obj.set(cx, "teamId", team_id)?; + + if let Some(function_id) = job.function_id { + let function_id_string = function_id.to_string(); + let js_function_id = cx.string(function_id_string); + js_obj.set(cx, "functionId", js_function_id)?; + } else { + js_obj.set(cx, "functionId", null)?; + } + + let js_created = cx + .date(job.created.timestamp_millis() as f64) + .expect("failed to create date"); + js_obj.set(cx, "created", js_created)?; + + if let Some(lock_id) = job.lock_id { + let lock_id_string = lock_id.to_string(); + let js_lock_id = cx.string(lock_id_string); + js_obj.set(cx, "lockId", js_lock_id)?; + } else { + js_obj.set(cx, "lockId", null)?; + } + + if let Some(last_heartbeat) = job.last_heartbeat { + let js_last_heartbeat = cx.string(last_heartbeat.to_rfc3339()); + js_obj.set(cx, "lastHeartbeat", js_last_heartbeat)?; + } else { + js_obj.set(cx, "lastHeartbeat", null)?; + } + + let janitor_touch_count = cx.number(job.janitor_touch_count as f64); + js_obj.set(cx, "janitorTouchCount", janitor_touch_count)?; + let transition_count = cx.number(job.transition_count as f64); + js_obj.set(cx, "transitionCount", transition_count)?; + + let js_last_transition = cx.string(job.last_transition.to_rfc3339()); + js_obj.set(cx, "lastTransition", js_last_transition)?; + + let js_queue_name = cx.string(&job.queue_name); + js_obj.set(cx, "queueName", js_queue_name)?; + + let js_state = cx.string(format!("{:?}", job.state)); + js_obj.set(cx, "state", js_state)?; + + let priority = cx.number(job.priority as f64); + js_obj.set(cx, "priority", priority)?; + + let js_scheduled = cx.string(job.scheduled.to_rfc3339()); + js_obj.set(cx, "scheduled", js_scheduled)?; + + if let Some(vm_state) = job.vm_state { + let vm_state = match std::str::from_utf8(&vm_state) { + Ok(v) => v, + Err(e) => panic!("Invalid UTF-8 sequence in vm_state: {}", e), + }; + let js_vm_state = cx.string(vm_state); + js_obj.set(cx, "vmState", js_vm_state)?; + } else { + js_obj.set(cx, "vmState", null)?; + } + + if let Some(metadata) = job.metadata { + let metadata = match std::str::from_utf8(&metadata) { + Ok(v) => v, + Err(e) => panic!("Invalid UTF-8 sequence in metadata: {}", e), + }; + let js_metadata = cx.string(metadata); + js_obj.set(cx, "metadata", js_metadata)?; + } else { + js_obj.set(cx, "metadata", null)?; + } + + if let Some(parameters) = job.parameters { + let parameters = match std::str::from_utf8(¶meters) { + Ok(v) => v, + Err(e) => panic!("Invalid UTF-8 sequence in parameters: {}", e), + }; + let js_parameters = cx.string(parameters); + js_obj.set(cx, "parameters", js_parameters)?; + } else { + js_obj.set(cx, "parameters", null)?; + } + + if let Some(blob) = job.blob { + let mut js_blob = JsArrayBuffer::new(cx, blob.len())?; + let js_blob_slice = js_blob.as_mut_slice(cx); + js_blob_slice.copy_from_slice(&blob); + js_obj.set(cx, "blob", js_blob)?; + } else { + js_obj.set(cx, "blob", null)?; + } + + js_array.set(cx, i as u32, js_obj)?; + } + + Ok(js_array) +} + +#[neon::main] +fn main(mut cx: ModuleContext) -> NeonResult<()> { 
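+    // Every binding is exported under the camelCase name that the TypeScript wrapper in
+    // src/index.ts invokes on the module it loads from index.node (e.g. `initWorker`,
+    // `dequeueJobsWithVmState`, `flushJob`).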
+ cx.export_function("hello", hello)?; + cx.export_function("initWorker", init_worker)?; + cx.export_function("initManager", init_manager)?; + cx.export_function("maybeInitWorker", maybe_init_worker)?; + cx.export_function("maybeInitManager", maybe_init_manager)?; + cx.export_function("createJob", create_job)?; + cx.export_function("dequeueJobs", dequeue_jobs)?; + cx.export_function("dequeueJobsWithVmState", dequeue_with_vm_state)?; + cx.export_function("flushJob", flush_job)?; + cx.export_function("setState", set_state)?; + cx.export_function("setQueue", set_queue)?; + cx.export_function("setPriority", set_priority)?; + cx.export_function("setScheduledAt", set_scheduled_at)?; + cx.export_function("setVmState", set_vm_state)?; + cx.export_function("setMetadata", set_metadata)?; + cx.export_function("setParameters", set_parameters)?; + cx.export_function("setBlob", set_blob)?; + + Ok(()) +} diff --git a/rust/cyclotron-node/tsconfig.json b/rust/cyclotron-node/tsconfig.json new file mode 100644 index 0000000000000..4fa58397f068a --- /dev/null +++ b/rust/cyclotron-node/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "module": "CommonJS", + "target": "ESNext", + "declaration": true, + "removeComments": true, + "emitDecoratorMetadata": true, + "experimentalDecorators": true, + "moduleResolution": "node", + "esModuleInterop": true, + "allowJs": true, + "sourceMap": true, + "baseUrl": "src/", + "rootDir": "src/", + "outDir": "dist/", + "types": ["node"], + "resolveJsonModule": true, + "strict": true, + "noImplicitAny": true, + "useUnknownInCatchVariables": false + }, + "include": ["src"], + "exclude": ["node_modules", "dist", "bin"] +} diff --git a/rust/docker-compose.yml b/rust/docker-compose.yml index 7abfe14cae79c..10318b5beb5ef 100644 --- a/rust/docker-compose.yml +++ b/rust/docker-compose.yml @@ -70,7 +70,7 @@ services: container_name: setup-test-db build: context: . 
- dockerfile: Dockerfile.migrate + dockerfile: Dockerfile.migrate-hooks restart: on-failure depends_on: db: diff --git a/rust/feature-flags/Cargo.toml b/rust/feature-flags/Cargo.toml index e4d51dc308d34..b43d09cc93d2f 100644 --- a/rust/feature-flags/Cargo.toml +++ b/rust/feature-flags/Cargo.toml @@ -28,6 +28,7 @@ thiserror = { workspace = true } serde-pickle = { version = "1.1.1"} sha1 = "0.10.6" regex = "1.10.4" +maxminddb = "0.17" sqlx = { workspace = true } uuid = { workspace = true } diff --git a/rust/feature-flags/src/api.rs b/rust/feature-flags/src/api.rs index da2b00fbfdef5..285e09edc5c77 100644 --- a/rust/feature-flags/src/api.rs +++ b/rust/feature-flags/src/api.rs @@ -20,11 +20,40 @@ pub enum FlagValue { String(String), } +// TODO the following two types are kinda general, maybe we should move them to a shared module +#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] +#[serde(untagged)] +pub enum BooleanOrStringObject { + Boolean(bool), + Object(HashMap), +} + +#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] +#[serde(untagged)] +pub enum BooleanOrBooleanObject { + Boolean(bool), + Object(HashMap), +} + #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct FlagsResponse { pub error_while_computing_flags: bool, pub feature_flags: HashMap, + // TODO support the other fields in the payload + // pub config: HashMap, + // pub toolbar_params: HashMap, + // pub is_authenticated: bool, + // pub supported_compression: Vec, + // pub session_recording: bool, + // pub feature_flag_payloads: HashMap, + // pub capture_performance: BooleanOrBooleanObject, + // #[serde(rename = "autocapture_opt_out")] + // pub autocapture_opt_out: bool, + // pub autocapture_exceptions: BooleanOrStringObject, + // pub surveys: bool, + // pub heatmaps: bool, + // pub site_apps: Vec, } #[derive(Error, Debug)] @@ -98,7 +127,7 @@ impl IntoResponse for FlagError { (StatusCode::BAD_REQUEST, "The distinct_id field is missing from the request. Please include a valid identifier.".to_string()) } FlagError::NoTokenError => { - (StatusCode::UNAUTHORIZED, "No API key provided. Please include a valid API key in your request.".to_string()) + (StatusCode::UNAUTHORIZED, "No API token provided. Please include a valid API token in your request.".to_string()) } FlagError::TokenValidationError => { (StatusCode::UNAUTHORIZED, "The provided API key is invalid or has expired. 
Please check your API key and try again.".to_string()) diff --git a/rust/feature-flags/src/config.rs b/rust/feature-flags/src/config.rs index d9e1bf06b1ee3..1f1c47a99249b 100644 --- a/rust/feature-flags/src/config.rs +++ b/rust/feature-flags/src/config.rs @@ -1,17 +1,19 @@ use envconfig::Envconfig; use once_cell::sync::Lazy; use std::net::SocketAddr; +use std::path::{Path, PathBuf}; use std::str::FromStr; +// TODO rewrite this to follow the AppConfig pattern in other files #[derive(Envconfig, Clone, Debug)] pub struct Config { #[envconfig(default = "127.0.0.1:3001")] pub address: SocketAddr, - #[envconfig(default = "postgres://posthog:posthog@localhost:5432/test_posthog")] + #[envconfig(default = "postgres://posthog:posthog@localhost:5432/posthog")] pub write_database_url: String, - #[envconfig(default = "postgres://posthog:posthog@localhost:5432/test_posthog")] + #[envconfig(default = "postgres://posthog:posthog@localhost:5432/posthog")] pub read_database_url: String, #[envconfig(default = "1024")] @@ -25,6 +27,9 @@ pub struct Config { #[envconfig(default = "1")] pub acquire_timeout_secs: u64, + + #[envconfig(from = "MAXMIND_DB_PATH", default = "")] + pub maxmind_db_path: String, } impl Config { @@ -38,6 +43,21 @@ impl Config { max_concurrent_jobs: 1024, max_pg_connections: 100, acquire_timeout_secs: 1, + maxmind_db_path: "".to_string(), + } + } + + pub fn get_maxmind_db_path(&self) -> PathBuf { + if self.maxmind_db_path.is_empty() { + Path::new(env!("CARGO_MANIFEST_DIR")) + .parent() + .unwrap() + .parent() + .unwrap() + .join("share") + .join("GeoLite2-City.mmdb") + } else { + PathBuf::from(&self.maxmind_db_path) } } } @@ -57,11 +77,11 @@ mod tests { ); assert_eq!( config.write_database_url, - "postgres://posthog:posthog@localhost:5432/test_posthog" + "postgres://posthog:posthog@localhost:5432/posthog" ); assert_eq!( config.read_database_url, - "postgres://posthog:posthog@localhost:5432/test_posthog" + "postgres://posthog:posthog@localhost:5432/posthog" ); assert_eq!(config.max_concurrent_jobs, 1024); assert_eq!(config.max_pg_connections, 100); diff --git a/rust/feature-flags/src/flag_definitions.rs b/rust/feature-flags/src/flag_definitions.rs index ef1db6762a5ce..df0b1998cd1bf 100644 --- a/rust/feature-flags/src/flag_definitions.rs +++ b/rust/feature-flags/src/flag_definitions.rs @@ -12,7 +12,7 @@ pub const TEAM_FLAGS_CACHE_PREFIX: &str = "posthog:1:team_feature_flags_"; #[derive(Debug, Deserialize)] pub enum GroupTypeIndex {} -#[derive(Debug, Clone, PartialEq, Eq, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] #[serde(rename_all = "snake_case")] pub enum OperatorType { Exact, @@ -32,7 +32,7 @@ pub enum OperatorType { IsDateBefore, } -#[derive(Debug, Clone, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize)] pub struct PropertyFilter { pub key: String, // TODO: Probably need a default for value? 
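// Illustrative sketch, not part of this patch: the Serialize derives added to these
// types exist so that FeatureFlagList::update_flags_in_redis (added further down in
// this file) can write flags back under the TEAM_FLAGS_CACHE_PREFIX key. A minimal
// round-trip, assuming PropertyFilter and OperatorType are in scope as in this
// file's tests:
fn property_filter_round_trip_sketch() {
    let filter = PropertyFilter {
        key: "country".to_string(),
        value: serde_json::json!("US"),
        operator: Some(OperatorType::Exact),
        prop_type: "person".to_string(),
        group_type_index: None,
    };
    // Serialize to the JSON shape stored in Redis...
    let encoded = serde_json::to_string(&filter).expect("serializes");
    // ...and read it back the same way from_redis does.
    let decoded: PropertyFilter = serde_json::from_str(&encoded).expect("deserializes");
    assert_eq!(decoded.key, "country");
}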
@@ -45,26 +45,26 @@ pub struct PropertyFilter { pub group_type_index: Option, } -#[derive(Debug, Clone, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize)] pub struct FlagGroupType { pub properties: Option>, pub rollout_percentage: Option, pub variant: Option, } -#[derive(Debug, Clone, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize)] pub struct MultivariateFlagVariant { pub key: String, pub name: Option, pub rollout_percentage: f64, } -#[derive(Debug, Clone, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize)] pub struct MultivariateFlagOptions { pub variants: Vec, } -#[derive(Debug, Clone, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize)] pub struct FlagFilters { pub groups: Vec, pub multivariate: Option, @@ -73,7 +73,7 @@ pub struct FlagFilters { pub super_groups: Option>, } -#[derive(Debug, Clone, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize)] pub struct FeatureFlag { pub id: i32, pub team_id: i32, @@ -117,7 +117,7 @@ impl FeatureFlag { } } -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct FeatureFlagList { pub flags: Vec, } @@ -189,6 +189,27 @@ impl FeatureFlagList { Ok(FeatureFlagList { flags: flags_list }) } + + pub async fn update_flags_in_redis( + client: Arc, + team_id: i32, + flags: &FeatureFlagList, + ) -> Result<(), FlagError> { + let payload = serde_json::to_string(&flags.flags).map_err(|e| { + tracing::error!("Failed to serialize flags: {}", e); + FlagError::DataParsingError + })?; + + client + .set(format!("{TEAM_FLAGS_CACHE_PREFIX}{}", team_id), payload) + .await + .map_err(|e| { + tracing::error!("Failed to update Redis cache: {}", e); + FlagError::CacheUpdateError + })?; + + Ok(()) + } } #[cfg(test)] @@ -1371,23 +1392,46 @@ mod tests { } // Fetch flags from both sources - let redis_flags = FeatureFlagList::from_redis(redis_client, team.id) + let mut redis_flags = FeatureFlagList::from_redis(redis_client, team.id) .await .expect("Failed to fetch flags from Redis"); - let pg_flags = FeatureFlagList::from_pg(pg_client, team.id) + let mut pg_flags = FeatureFlagList::from_pg(pg_client, team.id) .await .expect("Failed to fetch flags from Postgres"); + // Sort flags by key to ensure consistent order + redis_flags.flags.sort_by(|a, b| a.key.cmp(&b.key)); + pg_flags.flags.sort_by(|a, b| a.key.cmp(&b.key)); + // Compare results - assert_eq!(redis_flags.flags.len(), pg_flags.flags.len()); + assert_eq!( + redis_flags.flags.len(), + pg_flags.flags.len(), + "Number of flags mismatch" + ); + for (redis_flag, pg_flag) in redis_flags.flags.iter().zip(pg_flags.flags.iter()) { - assert_eq!(redis_flag.key, pg_flag.key); - assert_eq!(redis_flag.name, pg_flag.name); - assert_eq!(redis_flag.active, pg_flag.active); - assert_eq!(redis_flag.deleted, pg_flag.deleted); + assert_eq!(redis_flag.key, pg_flag.key, "Flag key mismatch"); + assert_eq!( + redis_flag.name, pg_flag.name, + "Flag name mismatch for key: {}", + redis_flag.key + ); + assert_eq!( + redis_flag.active, pg_flag.active, + "Flag active status mismatch for key: {}", + redis_flag.key + ); + assert_eq!( + redis_flag.deleted, pg_flag.deleted, + "Flag deleted status mismatch for key: {}", + redis_flag.key + ); assert_eq!( redis_flag.filters.groups[0].rollout_percentage, - pg_flag.filters.groups[0].rollout_percentage + pg_flag.filters.groups[0].rollout_percentage, + "Flag rollout percentage mismatch for key: {}", + redis_flag.key ); } } diff --git a/rust/feature-flags/src/flag_matching.rs b/rust/feature-flags/src/flag_matching.rs index 
485d8a646e823..88911c90bb7be 100644 --- a/rust/feature-flags/src/flag_matching.rs +++ b/rust/feature-flags/src/flag_matching.rs @@ -1,7 +1,7 @@ use crate::{ api::FlagError, database::Client as DatabaseClient, - flag_definitions::{FeatureFlag, FlagGroupType}, + flag_definitions::{FeatureFlag, FlagGroupType, PropertyFilter}, property_matching::match_property, }; use serde_json::Value; @@ -37,7 +37,13 @@ pub struct FeatureFlagMatcher { // pub flags: Vec, pub distinct_id: String, pub database_client: Option>, + // TODO do I need cached_properties, or do I get them from the request? + // like, in python I get them from the request. Hmm. Let me try that. + // OH, or is this the FlagMatcherCache. Yeah, so this is the flag matcher cache cached_properties: Option>, + person_property_overrides: Option>, + // TODO handle group properties + // group_property_overrides: Option>>, } const LONG_SCALE: u64 = 0xfffffffffffffff; @@ -46,21 +52,28 @@ impl FeatureFlagMatcher { pub fn new( distinct_id: String, database_client: Option>, + person_property_overrides: Option>, + // group_property_overrides: Option>>, ) -> Self { FeatureFlagMatcher { // flags, distinct_id, database_client, cached_properties: None, + person_property_overrides, + // group_property_overrides, } } - pub async fn get_match(&mut self, feature_flag: &FeatureFlag) -> FeatureFlagMatch { + pub async fn get_match( + &mut self, + feature_flag: &FeatureFlag, + ) -> Result { if self.hashed_identifier(feature_flag).is_none() { - return FeatureFlagMatch { + return Ok(FeatureFlagMatch { matches: false, variant: None, - }; + }); } // TODO: super groups for early access @@ -69,10 +82,10 @@ impl FeatureFlagMatcher { for (index, condition) in feature_flag.get_conditions().iter().enumerate() { let (is_match, _evaluation_reason) = self .is_condition_match(feature_flag, condition, index) - .await; + .await?; if is_match { - // TODO: This is a bit awkward, we should handle overrides only when variants exist. 
+ // TODO: this is a bit awkward, we should only handle variants when overrides exist let variant = match condition.variant.clone() { Some(variant_override) => { if feature_flag @@ -88,16 +101,25 @@ impl FeatureFlagMatcher { None => self.get_matching_variant(feature_flag), }; - // let payload = self.get_matching_payload(is_match, variant, feature_flag); - return FeatureFlagMatch { + return Ok(FeatureFlagMatch { matches: true, variant, - }; + }); } } - FeatureFlagMatch { + Ok(FeatureFlagMatch { matches: false, variant: None, + }) + } + + fn check_rollout(&self, feature_flag: &FeatureFlag, rollout_percentage: f64) -> (bool, String) { + if rollout_percentage == 100.0 + || self.get_hash(feature_flag, "") <= (rollout_percentage / 100.0) + { + (true, "CONDITION_MATCH".to_string()) + } else { + (false, "OUT_OF_ROLLOUT_BOUND".to_string()) } } @@ -108,39 +130,71 @@ impl FeatureFlagMatcher { feature_flag: &FeatureFlag, condition: &FlagGroupType, _index: usize, - ) -> (bool, String) { + ) -> Result<(bool, String), FlagError> { let rollout_percentage = condition.rollout_percentage.unwrap_or(100.0); - let mut condition_match = true; - - if let Some(ref properties) = condition.properties { + if let Some(properties) = &condition.properties { if properties.is_empty() { - condition_match = true; - } else { - // TODO: First handle given override properties before going to db - let target_properties = self - .get_person_properties(feature_flag.team_id, self.distinct_id.clone()) - .await - .unwrap_or_default(); - // TODO: Handle db issues / person not found - - condition_match = properties.iter().all(|property| { - match_property(property, &target_properties, false).unwrap_or(false) - }); + return Ok(self.check_rollout(feature_flag, rollout_percentage)); } - }; - if !condition_match { - return (false, "NO_CONDITION_MATCH".to_string()); - } else if rollout_percentage == 100.0 { - // TODO: Check floating point schenanigans if any - return (true, "CONDITION_MATCH".to_string()); + let target_properties = self.get_target_properties(feature_flag, properties).await?; + + if !self.all_properties_match(properties, &target_properties) { + return Ok((false, "NO_CONDITION_MATCH".to_string())); + } } - if self.get_hash(feature_flag, "") > (rollout_percentage / 100.0) { - return (false, "OUT_OF_ROLLOUT_BOUND".to_string()); + Ok(self.check_rollout(feature_flag, rollout_percentage)) + } + + async fn get_target_properties( + &mut self, + feature_flag: &FeatureFlag, + properties: &[PropertyFilter], + ) -> Result, FlagError> { + self.get_person_properties(feature_flag.team_id, properties) + .await + // TODO handle group properties, will go something like this + // if let Some(group_index) = feature_flag.get_group_type_index() { + // self.get_group_properties(feature_flag.team_id, group_index, properties) + // } else { + // self.get_person_properties(feature_flag.team_id, properties) + // .await + // } + } + + async fn get_person_properties( + &mut self, + team_id: i32, + properties: &[PropertyFilter], + ) -> Result, FlagError> { + if let Some(person_overrides) = &self.person_property_overrides { + // Check if all required properties are present in the overrides + // and none of them are of type "cohort" + let should_prefer_overrides = properties + .iter() + .all(|prop| person_overrides.contains_key(&prop.key) && prop.prop_type != "cohort"); + + if should_prefer_overrides { + // TODO let's count how often this happens + return Ok(person_overrides.clone()); + } } - (true, "CONDITION_MATCH".to_string()) + // If we don't prefer the 
overrides (they're either not present, don't contain enough properties to evaluate the condition, + // or contain a cohort property), fall back to getting properties from cache or DB + self.get_person_properties_from_cache_or_db(team_id, self.distinct_id.clone()) + .await + } + + fn all_properties_match( + &self, + condition_properties: &[PropertyFilter], + target_properties: &HashMap, + ) -> bool { + condition_properties + .iter() + .all(|property| match_property(property, target_properties, false).unwrap_or(false)) } pub fn hashed_identifier(&self, feature_flag: &FeatureFlag) -> Option { @@ -177,6 +231,7 @@ impl FeatureFlagMatcher { hash_val as f64 / LONG_SCALE as f64 } + /// This function takes a feature flag and returns the key of the variant that should be shown to the user. pub fn get_matching_variant(&self, feature_flag: &FeatureFlag) -> Option { let hash = self.get_hash(feature_flag, "variant"); let mut total_percentage = 0.0; @@ -190,7 +245,8 @@ impl FeatureFlagMatcher { None } - pub async fn get_person_properties( + /// Fetches person properties for the given distinct_id, preferring the in-memory cache and falling back to the database. + pub async fn get_person_properties_from_cache_or_db( + &mut self, team_id: i32, distinct_id: String, @@ -199,6 +255,7 @@ // Depends on how often we're calling this function // to match all flags for a single person + // TODO which of these properties do we need to cache? if let Some(cached_props) = self.cached_properties.clone() { // TODO: Maybe we don't want to copy around all user properties, this will by far be the largest chunk // of data we're copying around. Can we work with references here? @@ -243,6 +300,15 @@ Ok(props) } + + // async fn get_group_properties_from_cache_or_db( + // &self, + // team_id: i32, + // group_index: usize, + // properties: &Vec, + // ) -> HashMap { + // todo!() + // } } #[cfg(test)] @@ -251,7 +317,33 @@ mod tests { use serde_json::json; use super::*; - use crate::test_utils::{insert_new_team_in_pg, insert_person_for_team_in_pg, setup_pg_client}; + use crate::{ + flag_definitions::{FlagFilters, MultivariateFlagOptions, MultivariateFlagVariant}, + test_utils::{insert_new_team_in_pg, insert_person_for_team_in_pg, setup_pg_client}, + }; + + fn create_test_flag(team_id: i32, properties: Vec) -> FeatureFlag { + FeatureFlag { + id: 1, + team_id, + name: Some("Test Flag".to_string()), + key: "test_flag".to_string(), + filters: FlagFilters { + groups: vec![FlagGroupType { + properties: Some(properties), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }, + deleted: false, + active: true, + ensure_experience_continuity: false, + } + } #[tokio::test] async fn test_fetch_properties_from_pg_to_match() { @@ -300,22 +392,129 @@ mod tests { )) .unwrap(); - let mut matcher = FeatureFlagMatcher::new(distinct_id, Some(client.clone())); - let match_result = matcher.get_match(&flag).await; + let mut matcher = FeatureFlagMatcher::new(distinct_id, Some(client.clone()), None); + let match_result = matcher.get_match(&flag).await.unwrap(); assert_eq!(match_result.matches, true); assert_eq!(match_result.variant, None); // property value is different - let mut matcher = FeatureFlagMatcher::new(not_matching_distinct_id, Some(client.clone())); - let match_result = matcher.get_match(&flag).await; + let mut matcher = + FeatureFlagMatcher::new(not_matching_distinct_id, Some(client.clone()), None); + 
let match_result = matcher.get_match(&flag).await.unwrap(); assert_eq!(match_result.matches, false); assert_eq!(match_result.variant, None); // person does not exist let mut matcher = - FeatureFlagMatcher::new("other_distinct_id".to_string(), Some(client.clone())); - let match_result = matcher.get_match(&flag).await; + FeatureFlagMatcher::new("other_distinct_id".to_string(), Some(client.clone()), None); + let match_result = matcher.get_match(&flag).await.unwrap(); assert_eq!(match_result.matches, false); assert_eq!(match_result.variant, None); } + + #[tokio::test] + async fn test_person_property_overrides() { + let client = setup_pg_client(None).await; + let team = insert_new_team_in_pg(client.clone()).await.unwrap(); + + let flag = create_test_flag( + team.id, + vec![PropertyFilter { + key: "email".to_string(), + value: json!("override@example.com"), + operator: None, + prop_type: "email".to_string(), + group_type_index: None, + }], + ); + + let overrides = HashMap::from([("email".to_string(), json!("override@example.com"))]); + + let mut matcher = FeatureFlagMatcher::new( + "test_user".to_string(), + Some(client.clone()), + Some(overrides), + ); + + let match_result = matcher.get_match(&flag).await.unwrap(); + assert_eq!(match_result.matches, true); + } + + #[test] + fn test_hashed_identifier() { + let flag = create_test_flag(1, vec![]); + + let matcher = FeatureFlagMatcher::new("test_user".to_string(), None, None); + assert_eq!( + matcher.hashed_identifier(&flag), + Some("test_user".to_string()) + ); + + // Test with a group type index (this part of the functionality is not implemented yet) + // let mut group_flag = flag.clone(); + // group_flag.filters.aggregation_group_type_index = Some(1); + // assert_eq!(matcher.hashed_identifier(&group_flag), Some("".to_string())); + } + + #[test] + fn test_get_matching_variant() { + let flag = FeatureFlag { + id: 1, + team_id: 1, + name: Some("Test Flag".to_string()), + key: "test_flag".to_string(), + filters: FlagFilters { + groups: vec![], + multivariate: Some(MultivariateFlagOptions { + variants: vec![ + MultivariateFlagVariant { + name: Some("Control".to_string()), + key: "control".to_string(), + rollout_percentage: 33.0, + }, + MultivariateFlagVariant { + name: Some("Test".to_string()), + key: "test".to_string(), + rollout_percentage: 33.0, + }, + MultivariateFlagVariant { + name: Some("Test2".to_string()), + key: "test2".to_string(), + rollout_percentage: 34.0, + }, + ], + }), + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }, + deleted: false, + active: true, + ensure_experience_continuity: false, + }; + + let matcher = FeatureFlagMatcher::new("test_user".to_string(), None, None); + let variant = matcher.get_matching_variant(&flag); + assert!(variant.is_some()); + assert!(["control", "test", "test2"].contains(&variant.unwrap().as_str())); + } + + #[tokio::test] + async fn test_is_condition_match_empty_properties() { + let flag = create_test_flag(1, vec![]); + + let condition = FlagGroupType { + variant: None, + properties: Some(vec![]), + rollout_percentage: Some(100.0), + }; + + let mut matcher = FeatureFlagMatcher::new("test_user".to_string(), None, None); + let (is_match, reason) = matcher + .is_condition_match(&flag, &condition, 0) + .await + .unwrap(); + assert_eq!(is_match, true); + assert_eq!(reason, "CONDITION_MATCH"); + } } diff --git a/rust/feature-flags/src/flag_request.rs b/rust/feature-flags/src/flag_request.rs new file mode 100644 index 0000000000000..d15876a37481b --- /dev/null +++ 
b/rust/feature-flags/src/flag_request.rs @@ -0,0 +1,488 @@ +use std::{collections::HashMap, sync::Arc}; + +use bytes::Bytes; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use tracing::instrument; + +use crate::{ + api::FlagError, database::Client as DatabaseClient, flag_definitions::FeatureFlagList, + redis::Client as RedisClient, team::Team, +}; + +#[derive(Default, Debug, Deserialize, Serialize)] +pub struct FlagRequest { + #[serde( + alias = "$token", + alias = "api_key", + skip_serializing_if = "Option::is_none" + )] + pub token: Option, + #[serde(alias = "$distinct_id", skip_serializing_if = "Option::is_none")] + pub distinct_id: Option, + pub geoip_disable: Option, + #[serde(default)] + pub person_properties: Option>, + #[serde(default)] + pub groups: Option>, + // TODO: better type this since we know its going to be a nested json + #[serde(default)] + pub group_properties: Option>>, + #[serde(alias = "$anon_distinct_id", skip_serializing_if = "Option::is_none")] + pub anon_distinct_id: Option, + pub ip_address: Option, +} + +impl FlagRequest { + /// Takes a request payload and tries to read it. + /// Only supports base64 encoded payloads or uncompressed utf-8 as json. + #[instrument(skip_all)] + pub fn from_bytes(bytes: Bytes) -> Result { + tracing::debug!(len = bytes.len(), "decoding new request"); + // TODO: Add base64 decoding + let payload = String::from_utf8(bytes.into()).map_err(|e| { + tracing::error!("failed to decode body: {}", e); + FlagError::RequestDecodingError(String::from("invalid body encoding")) + })?; + + tracing::debug!(json = payload, "decoded event data"); + Ok(serde_json::from_str::(&payload)?) + } + + /// Extracts the token from the request and verifies it against the cache. + /// If the token is not found in the cache, it will be verified against the database. + pub async fn extract_and_verify_token( + &self, + redis_client: Arc, + pg_client: Arc, + ) -> Result { + let token = match self { + FlagRequest { + token: Some(token), .. + } => token.to_string(), + _ => return Err(FlagError::NoTokenError), + }; + + match Team::from_redis(redis_client.clone(), token.clone()).await { + Ok(_) => Ok(token), + Err(_) => { + // Fallback: Check PostgreSQL if not found in Redis + match Team::from_pg(pg_client, token.clone()).await { + Ok(team) => { + // Token found in PostgreSQL, update Redis cache so that we can verify it from Redis next time + if let Err(e) = Team::update_redis_cache(redis_client, &team).await { + tracing::warn!("Failed to update Redis cache: {}", e); + } + Ok(token) + } + // TODO do we need a custom error here to track the fallback + Err(_) => Err(FlagError::TokenValidationError), + } + } + } + } + + /// Fetches the team from the cache or the database. + /// If the team is not found in the cache, it will be fetched from the database and stored in the cache. + /// Returns the team if found, otherwise an error. 
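    /// 
    /// A minimal call-site sketch (illustrative only, not part of this patch), mirroring
    /// the way process_request in request_handler.rs, added later in this diff, chains
    /// these helpers; `redis`, `postgres` and `body` are assumed to already exist:
    /// 
    /// ```ignore
    /// let request = FlagRequest::from_bytes(body)?;
    /// let token = request
    ///     .extract_and_verify_token(redis.clone(), postgres.clone())
    ///     .await?;
    /// let team = request
    ///     .get_team_from_cache_or_pg(&token, redis.clone(), postgres.clone())
    ///     .await?;
    /// let distinct_id = request.extract_distinct_id()?;
    /// ```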
+ pub async fn get_team_from_cache_or_pg( + &self, + token: &str, + redis_client: Arc, + pg_client: Arc, + ) -> Result { + match Team::from_redis(redis_client.clone(), token.to_owned()).await { + Ok(team) => Ok(team), + Err(_) => match Team::from_pg(pg_client, token.to_owned()).await { + Ok(team) => { + // If we have the team in postgres, but not redis, update redis so we're faster next time + // TODO: we have some counters in django for tracking these cache misses + // we should probably do the same here + if let Err(e) = Team::update_redis_cache(redis_client, &team).await { + tracing::warn!("Failed to update Redis cache: {}", e); + } + Ok(team) + } + // TODO what kind of error should we return here? + Err(e) => Err(e), + }, + } + } + + /// Extracts the distinct_id from the request. + /// If the distinct_id is missing or empty, an error is returned. + pub fn extract_distinct_id(&self) -> Result { + let distinct_id = match &self.distinct_id { + None => return Err(FlagError::MissingDistinctId), + Some(id) => id, + }; + + match distinct_id.len() { + 0 => Err(FlagError::EmptyDistinctId), + 1..=200 => Ok(distinct_id.to_owned()), + _ => Ok(distinct_id.chars().take(200).collect()), + } + } + + /// Extracts the properties from the request. + /// If the request contains person_properties, they are returned. + // TODO do I even need this one? + pub fn extract_properties(&self) -> HashMap { + let mut properties = HashMap::new(); + if let Some(person_properties) = &self.person_properties { + properties.extend(person_properties.clone()); + } + properties + } + + /// Fetches the flags from the cache or the database. + /// If the flags are not found in the cache, they will be fetched from the database and stored in the cache. + /// Returns the flags if found, otherwise an error. + pub async fn get_flags_from_cache_or_pg( + &self, + team_id: i32, + redis_client: Arc, + pg_client: Arc, + ) -> Result { + match FeatureFlagList::from_redis(redis_client.clone(), team_id).await { + Ok(flags) => Ok(flags), + Err(_) => match FeatureFlagList::from_pg(pg_client, team_id).await { + Ok(flags) => { + // If we have the flags in postgres, but not redis, update redis so we're faster next time + // TODO: we have some counters in django for tracking these cache misses + // we should probably do the same here + if let Err(e) = + FeatureFlagList::update_flags_in_redis(redis_client, team_id, &flags).await + { + tracing::warn!("Failed to update Redis cache: {}", e); + } + Ok(flags) + } + // TODO what kind of error should we return here? 
+ Err(e) => Err(e), + }, + } + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use crate::api::FlagError; + use crate::flag_definitions::{ + FeatureFlag, FeatureFlagList, FlagFilters, FlagGroupType, OperatorType, PropertyFilter, + TEAM_FLAGS_CACHE_PREFIX, + }; + use crate::flag_request::FlagRequest; + use crate::redis::Client as RedisClient; + use crate::team::Team; + use crate::test_utils::{insert_new_team_in_redis, setup_pg_client, setup_redis_client}; + use bytes::Bytes; + use serde_json::json; + + #[test] + fn empty_distinct_id_not_accepted() { + let json = json!({ + "distinct_id": "", + "token": "my_token1", + }); + let bytes = Bytes::from(json.to_string()); + + let flag_payload = FlagRequest::from_bytes(bytes).expect("failed to parse request"); + + match flag_payload.extract_distinct_id() { + Err(FlagError::EmptyDistinctId) => (), + _ => panic!("expected empty distinct id error"), + }; + } + + #[test] + fn too_large_distinct_id_is_truncated() { + let json = json!({ + "distinct_id": std::iter::repeat("a").take(210).collect::(), + "token": "my_token1", + }); + let bytes = Bytes::from(json.to_string()); + + let flag_payload = FlagRequest::from_bytes(bytes).expect("failed to parse request"); + + assert_eq!(flag_payload.extract_distinct_id().unwrap().len(), 200); + } + + #[test] + fn distinct_id_is_returned_correctly() { + let json = json!({ + "$distinct_id": "alakazam", + "token": "my_token1", + }); + let bytes = Bytes::from(json.to_string()); + + let flag_payload = FlagRequest::from_bytes(bytes).expect("failed to parse request"); + + match flag_payload.extract_distinct_id() { + Ok(id) => assert_eq!(id, "alakazam"), + _ => panic!("expected distinct id"), + }; + } + + #[tokio::test] + async fn token_is_returned_correctly() { + let redis_client = setup_redis_client(None); + let pg_client = setup_pg_client(None).await; + let team = insert_new_team_in_redis(redis_client.clone()) + .await + .expect("Failed to insert new team in Redis"); + + let json = json!({ + "$distinct_id": "alakazam", + "token": team.api_token, + }); + let bytes = Bytes::from(json.to_string()); + + let flag_payload = FlagRequest::from_bytes(bytes).expect("failed to parse request"); + + match flag_payload + .extract_and_verify_token(redis_client, pg_client) + .await + { + Ok(extracted_token) => assert_eq!(extracted_token, team.api_token), + Err(e) => panic!("Failed to extract and verify token: {:?}", e), + }; + } + + #[tokio::test] + async fn test_get_team_from_cache_or_pg() { + let redis_client = setup_redis_client(None); + let pg_client = setup_pg_client(None).await; + let team = insert_new_team_in_redis(redis_client.clone()) + .await + .expect("Failed to insert new team in Redis"); + + let flag_request = FlagRequest { + token: Some(team.api_token.clone()), + ..Default::default() + }; + + // Test fetching from Redis + let result = flag_request + .get_team_from_cache_or_pg(&team.api_token, redis_client.clone(), pg_client.clone()) + .await; + assert!(result.is_ok()); + assert_eq!(result.unwrap().id, team.id); + + // Test fetching from PostgreSQL (simulate Redis miss) + // First, remove the team from Redis + redis_client + .del(format!("team:{}", team.api_token)) + .await + .expect("Failed to remove team from Redis"); + + let result = flag_request + .get_team_from_cache_or_pg(&team.api_token, redis_client.clone(), pg_client.clone()) + .await; + assert!(result.is_ok()); + assert_eq!(result.unwrap().id, team.id); + + // Verify that the team was re-added to Redis + let redis_team = 
Team::from_redis(redis_client.clone(), team.api_token.clone()).await; + assert!(redis_team.is_ok()); + } + + #[test] + fn test_extract_properties() { + let flag_request = FlagRequest { + person_properties: Some(HashMap::from([ + ("key1".to_string(), json!("value1")), + ("key2".to_string(), json!(42)), + ])), + ..Default::default() + }; + + let properties = flag_request.extract_properties(); + assert_eq!(properties.len(), 2); + assert_eq!(properties.get("key1").unwrap(), &json!("value1")); + assert_eq!(properties.get("key2").unwrap(), &json!(42)); + } + + #[tokio::test] + async fn test_get_flags_from_cache_or_pg() { + let redis_client = setup_redis_client(None); + let pg_client = setup_pg_client(None).await; + let team = insert_new_team_in_redis(redis_client.clone()) + .await + .expect("Failed to insert new team in Redis"); + + // Insert some mock flags into Redis + let mock_flags = FeatureFlagList { + flags: vec![ + FeatureFlag { + id: 1, + team_id: team.id, + name: Some("Beta Feature".to_string()), + key: "beta_feature".to_string(), + filters: FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "country".to_string(), + value: json!("US"), + operator: Some(OperatorType::Exact), + prop_type: "person".to_string(), + group_type_index: None, + }]), + rollout_percentage: Some(50.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }, + deleted: false, + active: true, + ensure_experience_continuity: false, + }, + FeatureFlag { + id: 2, + team_id: team.id, + name: Some("New User Interface".to_string()), + key: "new_ui".to_string(), + filters: FlagFilters { + groups: vec![], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }, + deleted: false, + active: false, + ensure_experience_continuity: false, + }, + FeatureFlag { + id: 3, + team_id: team.id, + name: Some("Premium Feature".to_string()), + key: "premium_feature".to_string(), + filters: FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "is_premium".to_string(), + value: json!(true), + operator: Some(OperatorType::Exact), + prop_type: "person".to_string(), + group_type_index: None, + }]), + rollout_percentage: Some(100.0), + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }, + deleted: false, + active: true, + ensure_experience_continuity: false, + }, + ], + }; + + FeatureFlagList::update_flags_in_redis(redis_client.clone(), team.id, &mock_flags) + .await + .expect("Failed to insert mock flags in Redis"); + + let flag_request = FlagRequest::default(); + + // Test fetching from Redis + let result = flag_request + .get_flags_from_cache_or_pg(team.id, redis_client.clone(), pg_client.clone()) + .await; + assert!(result.is_ok()); + let fetched_flags = result.unwrap(); + assert_eq!(fetched_flags.flags.len(), mock_flags.flags.len()); + + // Verify the contents of the fetched flags + let beta_feature = fetched_flags + .flags + .iter() + .find(|f| f.key == "beta_feature") + .unwrap(); + assert!(beta_feature.active); + assert_eq!( + beta_feature.filters.groups[0].rollout_percentage, + Some(50.0) + ); + assert_eq!( + beta_feature.filters.groups[0].properties.as_ref().unwrap()[0].key, + "country" + ); + + let new_ui = fetched_flags + .flags + .iter() + .find(|f| f.key == "new_ui") + .unwrap(); + assert!(!new_ui.active); + assert!(new_ui.filters.groups.is_empty()); + + let 
premium_feature = fetched_flags + .flags + .iter() + .find(|f| f.key == "premium_feature") + .unwrap(); + assert!(premium_feature.active); + assert_eq!( + premium_feature.filters.groups[0].rollout_percentage, + Some(100.0) + ); + assert_eq!( + premium_feature.filters.groups[0] + .properties + .as_ref() + .unwrap()[0] + .key, + "is_premium" + ); + + // Test fetching from PostgreSQL (simulate Redis miss) + // First, remove the flags from Redis + redis_client + .del(format!("{}:{}", TEAM_FLAGS_CACHE_PREFIX, team.id)) + .await + .expect("Failed to remove flags from Redis"); + + let result = flag_request + .get_flags_from_cache_or_pg(team.id, redis_client.clone(), pg_client.clone()) + .await; + assert!(result.is_ok()); + // Verify that the flags were re-added to Redis + let redis_flags = FeatureFlagList::from_redis(redis_client.clone(), team.id).await; + assert!(redis_flags.is_ok()); + assert_eq!(redis_flags.unwrap().flags.len(), mock_flags.flags.len()); + } + + #[tokio::test] + async fn test_error_cases() { + let redis_client = setup_redis_client(None); + let pg_client = setup_pg_client(None).await; + + // Test invalid token + let flag_request = FlagRequest { + token: Some("invalid_token".to_string()), + ..Default::default() + }; + let result = flag_request + .extract_and_verify_token(redis_client.clone(), pg_client.clone()) + .await; + assert!(matches!(result, Err(FlagError::TokenValidationError))); + + // Test missing distinct_id + let flag_request = FlagRequest { + token: Some("valid_token".to_string()), + distinct_id: None, + ..Default::default() + }; + let result = flag_request.extract_distinct_id(); + assert!(matches!(result, Err(FlagError::MissingDistinctId))); + } +} diff --git a/rust/feature-flags/src/geoip.rs b/rust/feature-flags/src/geoip.rs new file mode 100644 index 0000000000000..8ffbf0bfa34e4 --- /dev/null +++ b/rust/feature-flags/src/geoip.rs @@ -0,0 +1,304 @@ +use crate::config::Config; +use maxminddb::Reader; +use serde_json::Value; +use std::collections::HashMap; +use std::net::IpAddr; +use std::str::FromStr; +use thiserror::Error; +use tracing::log::{error, info}; + +#[derive(Error, Debug)] +pub enum GeoIpError { + #[error("Failed to open GeoIP database: {0}")] + DatabaseOpenError(#[from] maxminddb::MaxMindDBError), +} + +pub struct GeoIpClient { + reader: Reader>, +} + +impl GeoIpClient { + /// Creates a new GeoIpClient instance. + /// Returns an error if the database can't be loaded. + pub fn new(config: &Config) -> Result { + let geoip_path = config.get_maxmind_db_path(); + + info!("Attempting to open GeoIP database at: {:?}", geoip_path); + + let reader = Reader::open_readfile(&geoip_path)?; + info!("Successfully opened GeoIP database"); + + Ok(GeoIpClient { reader }) + } + + /// Checks if the given IP address is valid. + fn is_valid_ip(&self, ip: &str) -> bool { + ip != "127.0.0.1" && ip != "::1" + } + + /// Looks up the city data for the given IP address. + /// Returns None if the lookup fails. + fn lookup_city(&self, ip: &str, addr: IpAddr) -> Option { + match self.reader.lookup::(addr) { + Ok(city) => { + info!( + "GeoIP lookup succeeded for IP {}: Full city data: {:?}", + ip, city + ); + Some(city) + } + Err(e) => { + error!("GeoIP lookup error for IP {}: {}", ip, e); + None + } + } + } + + /// Returns a dictionary of geoip properties for the given ip address.
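    /// 
    /// A usage sketch (illustrative only, not part of this patch), based on the
    /// GEOIP_FIELDS mapping and the test fixtures below; a successful lookup yields up
    /// to seven "$geoip_*" string properties, while loopback or unparseable addresses
    /// yield an empty map instead of an error:
    /// 
    /// ```ignore
    /// let props = geoip_client.get_geoip_properties(Some("13.106.122.3"));
    /// assert_eq!(
    ///     props.get("$geoip_country_name"),
    ///     Some(&"Australia".to_string())
    /// );
    /// assert!(geoip_client.get_geoip_properties(Some("127.0.0.1")).is_empty());
    /// ```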
+ pub fn get_geoip_properties(&self, ip_address: Option<&str>) -> HashMap { + match ip_address { + None => { + info!("No IP address provided; returning empty properties"); + HashMap::new() + } + Some(ip) if !self.is_valid_ip(ip) => { + info!("Returning empty properties for IP: {}", ip); + HashMap::new() + } + Some(ip) => match IpAddr::from_str(ip) { + Ok(addr) => self + .lookup_city(ip, addr) + .map(|city| extract_properties(&city)) + .unwrap_or_default(), + Err(_) => { + error!("Invalid IP address: {}", ip); + HashMap::new() + } + }, + } + } +} + +const GEOIP_FIELDS: [(&str, &[&str]); 7] = [ + ("$geoip_country_name", &["country", "names", "en"]), + ("$geoip_city_name", &["city", "names", "en"]), + ("$geoip_country_code", &["country", "iso_code"]), + ("$geoip_continent_name", &["continent", "names", "en"]), + ("$geoip_continent_code", &["continent", "code"]), + ("$geoip_postal_code", &["postal", "code"]), + ("$geoip_time_zone", &["location", "time_zone"]), +]; + +fn get_nested_value<'a>(data: &'a Value, path: &[&str]) -> Option<&'a str> { + let mut current = data; + for &key in path { + current = current.get(key)?; + } + current.as_str() +} + +fn extract_properties(city: &Value) -> HashMap { + GEOIP_FIELDS + .iter() + .filter_map(|&(field, path)| { + get_nested_value(city, path).map(|value| (field.to_string(), value.to_string())) + }) + .collect() +} + +#[cfg(test)] +mod tests { + use serde_json::json; + + use super::*; + use crate::config::Config; + use std::sync::Once; + + static INIT: Once = Once::new(); + + fn initialize() { + INIT.call_once(|| { + tracing_subscriber::fmt::init(); + }); + } + + fn create_test_service() -> GeoIpClient { + let config = Config::default_test_config(); + GeoIpClient::new(&config).expect("Failed to create GeoIpService") + } + + #[test] + fn test_geoip_service_creation() { + initialize(); + let config = Config::default_test_config(); + let service_result = GeoIpClient::new(&config); + assert!(service_result.is_ok()); + } + + #[test] + fn test_geoip_service_creation_failure() { + initialize(); + let mut config = Config::default_test_config(); + config.maxmind_db_path = "/path/to/nonexistent/file".to_string(); + let service_result = GeoIpClient::new(&config); + assert!(service_result.is_err()); + } + + #[test] + fn test_get_geoip_properties_none() { + initialize(); + let service = create_test_service(); + let result = service.get_geoip_properties(None); + assert!(result.is_empty()); + } + + #[test] + fn test_get_geoip_properties_localhost() { + initialize(); + let service = create_test_service(); + let result = service.get_geoip_properties(Some("127.0.0.1")); + assert!(result.is_empty()); + } + + #[test] + fn test_get_geoip_properties_invalid_ip() { + initialize(); + let service = create_test_service(); + let result = service.get_geoip_properties(Some("not_an_ip")); + assert!(result.is_empty()); + } + + #[test] + fn test_geoip_results() { + initialize(); + let service = create_test_service(); + let test_cases = vec![ + ("13.106.122.3", "Australia"), + ("31.28.64.3", "United Kingdom"), + ("2600:6c52:7a00:11c:1b6:b7b0:ea19:6365", "United States"), + ]; + + for (ip, expected_country) in test_cases { + let result = service.get_geoip_properties(Some(ip)); + info!("GeoIP lookup result for IP {}: {:?}", ip, result); + info!( + "Expected country: {}, Actual country: {:?}", + expected_country, + result.get("$geoip_country_name") + ); + assert_eq!( + result.get("$geoip_country_name"), + Some(&expected_country.to_string()) + ); + assert_eq!(result.len(), 7); + } + } + + 
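    // Illustrative sketch, not part of this patch: extract_properties silently skips
    // any GEOIP_FIELDS path missing from the record, so a partial city payload
    // yields a smaller map rather than an error. Assumes the same imports as the
    // surrounding tests.
    #[test]
    fn test_extract_properties_partial_record_sketch() {
        let city_data = json!({
            "country": {
                "names": { "en": "Australia" },
                "iso_code": "AU"
            }
        });

        let properties = extract_properties(&city_data);

        // Only the two country paths resolve; city, continent, postal code and
        // time zone are absent and simply dropped.
        assert_eq!(properties.len(), 2);
        assert_eq!(
            properties.get("$geoip_country_name"),
            Some(&"Australia".to_string())
        );
        assert_eq!(
            properties.get("$geoip_country_code"),
            Some(&"AU".to_string())
        );
    }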
#[test] + fn test_geoip_on_local_ip() { + initialize(); + let service = create_test_service(); + let result = service.get_geoip_properties(Some("127.0.0.1")); + assert!(result.is_empty()); + } + + #[test] + fn test_geoip_on_invalid_ip() { + initialize(); + let service = create_test_service(); + let result = service.get_geoip_properties(Some("999.999.999.999")); + assert!(result.is_empty()); + } + + #[test] + fn test_get_nested_value() { + let data = json!({ + "country": { + "names": { + "en": "United States" + } + }, + "city": { + "names": { + "en": "New York" + } + }, + "postal": { + "code": "10001" + } + }); + + assert_eq!( + get_nested_value(&data, &["country", "names", "en"]), + Some("United States") + ); + assert_eq!( + get_nested_value(&data, &["city", "names", "en"]), + Some("New York") + ); + assert_eq!(get_nested_value(&data, &["postal", "code"]), Some("10001")); + assert_eq!(get_nested_value(&data, &["country", "code"]), None); + assert_eq!(get_nested_value(&data, &["nonexistent", "path"]), None); + } + + #[test] + fn test_extract_properties() { + let city_data = json!({ + "country": { + "names": { + "en": "United States" + }, + "iso_code": "US" + }, + "city": { + "names": { + "en": "New York" + } + }, + "continent": { + "names": { + "en": "North America" + }, + "code": "NA" + }, + "postal": { + "code": "10001" + }, + "location": { + "time_zone": "America/New_York" + } + }); + + let properties = extract_properties(&city_data); + + assert_eq!( + properties.get("$geoip_country_name"), + Some(&"United States".to_string()) + ); + assert_eq!( + properties.get("$geoip_city_name"), + Some(&"New York".to_string()) + ); + assert_eq!( + properties.get("$geoip_country_code"), + Some(&"US".to_string()) + ); + assert_eq!( + properties.get("$geoip_continent_name"), + Some(&"North America".to_string()) + ); + assert_eq!( + properties.get("$geoip_continent_code"), + Some(&"NA".to_string()) + ); + assert_eq!( + properties.get("$geoip_postal_code"), + Some(&"10001".to_string()) + ); + assert_eq!( + properties.get("$geoip_time_zone"), + Some(&"America/New_York".to_string()) + ); + assert_eq!(properties.len(), 7); + } +} diff --git a/rust/feature-flags/src/lib.rs b/rust/feature-flags/src/lib.rs index 7784bd7bf1b8d..de5065723e45a 100644 --- a/rust/feature-flags/src/lib.rs +++ b/rust/feature-flags/src/lib.rs @@ -3,13 +3,15 @@ pub mod config; pub mod database; pub mod flag_definitions; pub mod flag_matching; +pub mod flag_request; +pub mod geoip; pub mod property_matching; pub mod redis; +pub mod request_handler; pub mod router; pub mod server; pub mod team; pub mod v0_endpoint; -pub mod v0_request; // Test modules don't need to be compiled with main binary // #[cfg(test)] diff --git a/rust/feature-flags/src/redis.rs b/rust/feature-flags/src/redis.rs index 89dde421d0abc..954ffe1a09f04 100644 --- a/rust/feature-flags/src/redis.rs +++ b/rust/feature-flags/src/redis.rs @@ -34,6 +34,7 @@ pub trait Client { async fn get(&self, k: String) -> Result; async fn set(&self, k: String, v: String) -> Result<()>; + async fn del(&self, k: String) -> Result<(), CustomRedisError>; } pub struct RedisClient { @@ -93,4 +94,14 @@ impl Client for RedisClient { Ok(fut?) 
} + + async fn del(&self, k: String) -> Result<(), CustomRedisError> { + let mut conn = self.client.get_async_connection().await?; + + let results = conn.del(k); + let fut: Result<(), RedisError> = + timeout(Duration::from_secs(REDIS_TIMEOUT_MILLISECS), results).await?; + + fut.map_err(CustomRedisError::from) + } } diff --git a/rust/feature-flags/src/request_handler.rs b/rust/feature-flags/src/request_handler.rs new file mode 100644 index 0000000000000..35606727f3259 --- /dev/null +++ b/rust/feature-flags/src/request_handler.rs @@ -0,0 +1,375 @@ +use crate::{ + api::{FlagError, FlagValue, FlagsResponse}, + database::Client, + flag_definitions::FeatureFlagList, + flag_matching::FeatureFlagMatcher, + flag_request::FlagRequest, + geoip::GeoIpClient, + router, +}; +use axum::{extract::State, http::HeaderMap}; +use bytes::Bytes; +use serde::Deserialize; +use serde_json::Value; +use std::sync::Arc; +use std::{collections::HashMap, net::IpAddr}; +use tracing::error; + +#[derive(Deserialize, Default)] +pub enum Compression { + #[default] + Unsupported, + #[serde(rename = "gzip", alias = "gzip-js")] + Gzip, +} + +impl Compression { + pub fn as_str(&self) -> &'static str { + match self { + Compression::Gzip => "gzip", + Compression::Unsupported => "unsupported", + } + } +} + +#[derive(Deserialize, Default)] +pub struct FlagsQueryParams { + #[serde(alias = "v")] + pub version: Option, + + pub compression: Option, + + #[serde(alias = "ver")] + pub lib_version: Option, + + #[serde(alias = "_")] + pub sent_at: Option, +} + +pub struct RequestContext { + pub state: State, + pub ip: IpAddr, + pub meta: FlagsQueryParams, + pub headers: HeaderMap, + pub body: Bytes, +} + +pub async fn process_request(context: RequestContext) -> Result { + let RequestContext { + state, + ip, + meta: _, // TODO use this + headers, + body, + } = context; + + let request = decode_request(&headers, body)?; + let token = request + .extract_and_verify_token(state.redis.clone(), state.postgres.clone()) + .await?; + let team = request + .get_team_from_cache_or_pg(&token, state.redis.clone(), state.postgres.clone()) + .await?; + let distinct_id = request.extract_distinct_id()?; + let person_property_overrides = get_person_property_overrides( + !request.geoip_disable.unwrap_or(false), + request.person_properties.clone(), + &ip, + &state.geoip.clone(), + ); + // TODO group_property_overrides + + let feature_flags_from_cache_or_pg = request + .get_flags_from_cache_or_pg(team.id, state.redis.clone(), state.postgres.clone()) + .await?; + + let flags_response = evaluate_feature_flags( + distinct_id, + feature_flags_from_cache_or_pg, + Some(state.postgres.clone()), + person_property_overrides, + // group_property_overrides, + ) + .await; + + Ok(flags_response) +} + +/// Get person property overrides based on the request +/// - If geoip is enabled, fetch geoip properties and merge them with any person properties +/// - If geoip is disabled, return the person properties as is +/// - If no person properties are provided, return None +pub fn get_person_property_overrides( + geoip_enabled: bool, + person_properties: Option>, + ip: &IpAddr, + geoip_service: &GeoIpClient, +) -> Option> { + match (geoip_enabled, person_properties) { + (true, Some(mut props)) => { + let geoip_props = geoip_service.get_geoip_properties(Some(&ip.to_string())); + if !geoip_props.is_empty() { + props.extend(geoip_props.into_iter().map(|(k, v)| (k, Value::String(v)))); + } + Some(props) + } + (true, None) => { + let geoip_props = 
geoip_service.get_geoip_properties(Some(&ip.to_string())); + if !geoip_props.is_empty() { + Some( + geoip_props + .into_iter() + .map(|(k, v)| (k, Value::String(v))) + .collect(), + ) + } else { + None + } + } + (false, Some(props)) => Some(props), + (false, None) => None, + } +} + +/// Decode a request into a `FlagRequest` +/// - Currently only supports JSON requests +// TODO support all supported content types +fn decode_request(headers: &HeaderMap, body: Bytes) -> Result { + match headers + .get("content-type") + .map_or("", |v| v.to_str().unwrap_or("")) + { + "application/json" => FlagRequest::from_bytes(body), + ct => Err(FlagError::RequestDecodingError(format!( + "unsupported content type: {}", + ct + ))), + } +} + +/// Evaluate feature flags for a given distinct_id +/// Returns a map of feature flag keys to their values +/// If an error occurs while evaluating a flag, it will be logged and the flag will be omitted from the result +pub async fn evaluate_feature_flags( + distinct_id: String, + feature_flags_from_cache_or_pg: FeatureFlagList, + database_client: Option>, + person_property_overrides: Option>, + // group_property_overrides: Option>>, +) -> FlagsResponse { + let mut matcher = FeatureFlagMatcher::new( + distinct_id.clone(), + database_client, + person_property_overrides, + // group_property_overrides, + ); + let mut feature_flags = HashMap::new(); + let mut error_while_computing_flags = false; + let feature_flag_list = feature_flags_from_cache_or_pg.flags; + + for flag in feature_flag_list { + if !flag.active || flag.deleted { + continue; + } + + match matcher.get_match(&flag).await { + Ok(flag_match) => { + let flag_value = if flag_match.matches { + match flag_match.variant { + Some(variant) => FlagValue::String(variant), + None => FlagValue::Boolean(true), + } + } else { + FlagValue::Boolean(false) + }; + feature_flags.insert(flag.key.clone(), flag_value); + } + Err(e) => { + error_while_computing_flags = true; + error!( + "Error evaluating feature flag '{}' for distinct_id '{}': {:?}", + flag.key, distinct_id, e + ); + } + } + } + + FlagsResponse { + error_while_computing_flags, + feature_flags, + } +} + +#[cfg(test)] +mod tests { + use crate::{ + config::Config, + flag_definitions::{FeatureFlag, FlagFilters, FlagGroupType, OperatorType, PropertyFilter}, + test_utils::setup_pg_client, + }; + + use super::*; + use axum::http::HeaderMap; + use serde_json::json; + use std::net::Ipv4Addr; + + fn create_test_geoip_service() -> GeoIpClient { + let config = Config::default_test_config(); + GeoIpClient::new(&config).expect("Failed to create GeoIpService for testing") + } + + #[test] + fn test_geoip_enabled_with_person_properties() { + let geoip_service = create_test_geoip_service(); + + let mut person_props = HashMap::new(); + person_props.insert("name".to_string(), Value::String("John".to_string())); + + let result = get_person_property_overrides( + true, + Some(person_props), + &IpAddr::V4(Ipv4Addr::new(8, 8, 8, 8)), // Google's public DNS, should be in the US + &geoip_service, + ); + + assert!(result.is_some()); + let result = result.unwrap(); + assert!(result.len() > 1); + assert_eq!(result.get("name"), Some(&Value::String("John".to_string()))); + assert!(result.contains_key("$geoip_country_name")); + } + + #[test] + fn test_geoip_enabled_without_person_properties() { + let geoip_service = create_test_geoip_service(); + + let result = get_person_property_overrides( + true, + None, + &IpAddr::V4(Ipv4Addr::new(8, 8, 8, 8)), // Google's public DNS, should be in the US + 
&geoip_service, + ); + + assert!(result.is_some()); + let result = result.unwrap(); + assert!(!result.is_empty()); + assert!(result.contains_key("$geoip_country_name")); + } + + #[test] + fn test_geoip_disabled_with_person_properties() { + let geoip_service = create_test_geoip_service(); + + let mut person_props = HashMap::new(); + person_props.insert("name".to_string(), Value::String("John".to_string())); + + let result = get_person_property_overrides( + false, + Some(person_props), + &IpAddr::V4(Ipv4Addr::new(8, 8, 8, 8)), + &geoip_service, + ); + + assert!(result.is_some()); + let result = result.unwrap(); + assert_eq!(result.len(), 1); + assert_eq!(result.get("name"), Some(&Value::String("John".to_string()))); + } + + #[test] + fn test_geoip_disabled_without_person_properties() { + let geoip_service = create_test_geoip_service(); + + let result = get_person_property_overrides( + false, + None, + &IpAddr::V4(Ipv4Addr::new(8, 8, 8, 8)), + &geoip_service, + ); + + assert!(result.is_none()); + } + + #[test] + fn test_geoip_enabled_local_ip() { + let geoip_service = create_test_geoip_service(); + + let result = get_person_property_overrides( + true, + None, + &IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), + &geoip_service, + ); + + assert!(result.is_none()); + } + + #[tokio::test] + async fn test_evaluate_feature_flags() { + let pg_client = setup_pg_client(None).await; + let flag = FeatureFlag { + name: Some("Test Flag".to_string()), + id: 1, + key: "test_flag".to_string(), + active: true, + deleted: false, + team_id: 1, + filters: FlagFilters { + groups: vec![FlagGroupType { + properties: Some(vec![PropertyFilter { + key: "country".to_string(), + value: json!("US"), + operator: Some(OperatorType::Exact), + prop_type: "person".to_string(), + group_type_index: None, + }]), + rollout_percentage: Some(100.0), // Set to 100% to ensure it's always on + variant: None, + }], + multivariate: None, + aggregation_group_type_index: None, + payloads: None, + super_groups: None, + }, + ensure_experience_continuity: false, + }; + + let feature_flag_list = FeatureFlagList { flags: vec![flag] }; + + let mut person_properties = HashMap::new(); + person_properties.insert("country".to_string(), json!("US")); + + let result = evaluate_feature_flags( + "user123".to_string(), + feature_flag_list, + Some(pg_client), + Some(person_properties), + ) + .await; + + assert!(!result.error_while_computing_flags); + assert!(result.feature_flags.contains_key("test_flag")); + assert_eq!(result.feature_flags["test_flag"], FlagValue::Boolean(true)); + } + + #[test] + fn test_decode_request() { + let mut headers = HeaderMap::new(); + headers.insert("content-type", "application/json".parse().unwrap()); + + let body = Bytes::from(r#"{"token": "test_token", "distinct_id": "user123"}"#); + + let result = decode_request(&headers, body); + + assert!(result.is_ok()); + let request = result.unwrap(); + assert_eq!(request.token, Some("test_token".to_string())); + assert_eq!(request.distinct_id, Some("user123".to_string())); + } + + #[test] + fn test_compression_as_str() { + assert_eq!(Compression::Gzip.as_str(), "gzip"); + assert_eq!(Compression::Unsupported.as_str(), "unsupported"); + } +} diff --git a/rust/feature-flags/src/router.rs b/rust/feature-flags/src/router.rs index 2fbc87c870930..1a32e0837cede 100644 --- a/rust/feature-flags/src/router.rs +++ b/rust/feature-flags/src/router.rs @@ -2,21 +2,29 @@ use std::sync::Arc; use axum::{routing::post, Router}; -use crate::{database::Client as DatabaseClient, redis::Client as RedisClient, 
v0_endpoint}; +use crate::{ + database::Client as DatabaseClient, geoip::GeoIpClient, redis::Client as RedisClient, + v0_endpoint, +}; #[derive(Clone)] pub struct State { pub redis: Arc, // TODO: Add pgClient when ready pub postgres: Arc, + pub geoip: Arc, } -pub fn router(redis: Arc, postgres: Arc) -> Router +pub fn router(redis: Arc, postgres: Arc, geoip: Arc) -> Router where R: RedisClient + Send + Sync + 'static, D: DatabaseClient + Send + Sync + 'static, { - let state = State { redis, postgres }; + let state = State { + redis, + postgres, + geoip, + }; Router::new() .route("/flags", post(v0_endpoint::flags).get(v0_endpoint::flags)) diff --git a/rust/feature-flags/src/server.rs b/rust/feature-flags/src/server.rs index 37bd721a9a51f..c718657e3af66 100644 --- a/rust/feature-flags/src/server.rs +++ b/rust/feature-flags/src/server.rs @@ -6,6 +6,7 @@ use tokio::net::TcpListener; use crate::config::Config; use crate::database::PgClient; +use crate::geoip::GeoIpClient; use crate::redis::RedisClient; use crate::router; @@ -29,8 +30,16 @@ where } }; + let geoip_service = match GeoIpClient::new(&config) { + Ok(service) => Arc::new(service), + Err(e) => { + tracing::error!("Failed to create GeoIP service: {}", e); + return; + } + }; + // You can decide which client to pass to the router, or pass both if needed - let app = router::router(redis_client, read_postgres_client); + let app = router::router(redis_client, read_postgres_client, geoip_service); tracing::info!("listening on {:?}", listener.local_addr().unwrap()); axum::serve( diff --git a/rust/feature-flags/src/team.rs b/rust/feature-flags/src/team.rs index 678668490485d..bd975385eb216 100644 --- a/rust/feature-flags/src/team.rs +++ b/rust/feature-flags/src/team.rs @@ -8,11 +8,22 @@ use crate::{api::FlagError, database::Client as DatabaseClient, redis::Client as // TODO: Add integration tests across repos to ensure this doesn't happen. pub const TEAM_TOKEN_CACHE_PREFIX: &str = "posthog:1:team_token:"; -#[derive(Debug, Deserialize, Serialize, sqlx::FromRow)] +#[derive(Clone, Debug, Deserialize, Serialize, sqlx::FromRow)] pub struct Team { pub id: i32, pub name: String, pub api_token: String, + // TODO: the following fields are used for the `/decide` response, + // but they're not used for flags and they don't live in redis. + // At some point I'll need to differentiate between teams in Redis and teams + // with additional fields in Postgres, since the Postgres team is a superset of the fields + // we use for flags, anyway. 
+ // pub surveys_opt_in: bool, + // pub heatmaps_opt_in: bool, + // pub capture_performance_opt_in: bool, + // pub autocapture_web_vitals_opt_in: bool, + // pub autocapture_opt_out: bool, + // pub autocapture_exceptions_opt_in: bool, } impl Team { @@ -40,7 +51,7 @@ impl Team { #[instrument(skip_all)] pub async fn update_redis_cache( client: Arc, - team: Team, + team: &Team, ) -> Result<(), FlagError> { let serialized_team = serde_json::to_string(&team).map_err(|e| { tracing::error!("Failed to serialize team: {}", e); diff --git a/rust/feature-flags/src/v0_endpoint.rs b/rust/feature-flags/src/v0_endpoint.rs index d32f976d94447..56734eae32d45 100644 --- a/rust/feature-flags/src/v0_endpoint.rs +++ b/rust/feature-flags/src/v0_endpoint.rs @@ -1,20 +1,18 @@ -use std::collections::HashMap; +use std::net::IpAddr; -use axum::{debug_handler, Json}; -use bytes::Bytes; +use crate::{ + api::{FlagError, FlagsResponse}, + request_handler::{process_request, FlagsQueryParams, RequestContext}, + router, +}; // TODO: stream this instead use axum::extract::{MatchedPath, Query, State}; use axum::http::{HeaderMap, Method}; +use axum::{debug_handler, Json}; use axum_client_ip::InsecureClientIp; +use bytes::Bytes; use tracing::instrument; -use crate::api::FlagValue; -use crate::{ - api::{FlagError, FlagsResponse}, - router, - v0_request::{FlagRequest, FlagsQueryParams}, -}; - /// Feature flag evaluation endpoint. /// Only supports a specific shape of data, and rejects any malformed data. @@ -42,57 +40,50 @@ pub async fn flags( path: MatchedPath, body: Bytes, ) -> Result, FlagError> { + record_request_metadata(&headers, &method, &path, &ip, &meta); + + let context = RequestContext { + state, + ip, + meta: meta.0, + headers, + body, + }; + + Ok(Json(process_request(context).await?)) +} + +fn record_request_metadata( + headers: &HeaderMap, + method: &Method, + path: &MatchedPath, + ip: &IpAddr, + meta: &Query, +) { let user_agent = headers .get("user-agent") .map_or("unknown", |v| v.to_str().unwrap_or("unknown")); let content_encoding = headers .get("content-encoding") .map_or("unknown", |v| v.to_str().unwrap_or("unknown")); + let content_type = headers + .get("content-type") + .map_or("unknown", |v| v.to_str().unwrap_or("unknown")); tracing::Span::current().record("user_agent", user_agent); tracing::Span::current().record("content_encoding", content_encoding); - tracing::Span::current().record("version", meta.version.clone()); + tracing::Span::current().record("content_type", content_type); + tracing::Span::current().record("version", meta.version.as_deref().unwrap_or("unknown")); + tracing::Span::current().record( + "lib_version", + meta.lib_version.as_deref().unwrap_or("unknown"), + ); + tracing::Span::current().record( + "compression", + meta.compression.as_ref().map_or("none", |c| c.as_str()), + ); tracing::Span::current().record("method", method.as_str()); tracing::Span::current().record("path", path.as_str().trim_end_matches('/')); tracing::Span::current().record("ip", ip.to_string()); - - let request = match headers - .get("content-type") - .map_or("", |v| v.to_str().unwrap_or("")) - { - "application/json" => { - tracing::Span::current().record("content_type", "application/json"); - FlagRequest::from_bytes(body) - } - ct => { - return Err(FlagError::RequestDecodingError(format!( - "unsupported content type: {}", - ct - ))); - } - }?; - - let token = request - .extract_and_verify_token(state.redis.clone(), state.postgres.clone()) - .await?; - - let distinct_id = request.extract_distinct_id()?; - - 
tracing::Span::current().record("token", &token); - tracing::Span::current().record("distinct_id", &distinct_id); - - tracing::debug!("request: {:?}", request); - - // TODO: Some actual processing for evaluating the feature flag - - Ok(Json(FlagsResponse { - error_while_computing_flags: false, - feature_flags: HashMap::from([ - ( - "beta-feature".to_string(), - FlagValue::String("variant-1".to_string()), - ), - ("rollout-flag".to_string(), FlagValue::Boolean(true)), - ]), - })) + tracing::Span::current().record("sent_at", &meta.sent_at.unwrap_or(0).to_string()); } diff --git a/rust/feature-flags/src/v0_request.rs b/rust/feature-flags/src/v0_request.rs deleted file mode 100644 index 4447cb64d1d68..0000000000000 --- a/rust/feature-flags/src/v0_request.rs +++ /dev/null @@ -1,151 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use bytes::Bytes; -use serde::{Deserialize, Serialize}; -use serde_json::Value; -use tracing::instrument; - -use crate::{ - api::FlagError, database::Client as DatabaseClient, redis::Client as RedisClient, team::Team, -}; - -#[derive(Deserialize, Default)] -pub struct FlagsQueryParams { - #[serde(alias = "v")] - pub version: Option, -} - -#[derive(Default, Debug, Deserialize, Serialize)] -pub struct FlagRequest { - #[serde( - alias = "$token", - alias = "api_key", - skip_serializing_if = "Option::is_none" - )] - pub token: Option, - #[serde(alias = "$distinct_id", skip_serializing_if = "Option::is_none")] - pub distinct_id: Option, - pub geoip_disable: Option, - #[serde(default)] - pub person_properties: Option>, - #[serde(default)] - pub groups: Option>, - // TODO: better type this since we know its going to be a nested json - #[serde(default)] - pub group_properties: Option>, - #[serde(alias = "$anon_distinct_id", skip_serializing_if = "Option::is_none")] - pub anon_distinct_id: Option, -} - -impl FlagRequest { - /// Takes a request payload and tries to read it. - /// Only supports base64 encoded payloads or uncompressed utf-8 as json. - #[instrument(skip_all)] - pub fn from_bytes(bytes: Bytes) -> Result { - tracing::debug!(len = bytes.len(), "decoding new request"); - // TODO: Add base64 decoding - let payload = String::from_utf8(bytes.into()).map_err(|e| { - tracing::error!("failed to decode body: {}", e); - FlagError::RequestDecodingError(String::from("invalid body encoding")) - })?; - - tracing::debug!(json = payload, "decoded event data"); - Ok(serde_json::from_str::(&payload)?) - } - - pub async fn extract_and_verify_token( - &self, - redis_client: Arc, - pg_client: Arc, - ) -> Result { - let token = match self { - FlagRequest { - token: Some(token), .. 
- } => token.to_string(), - _ => return Err(FlagError::NoTokenError), - }; - - match Team::from_redis(redis_client.clone(), token.clone()).await { - Ok(_) => Ok(token), - Err(_) => { - // Fallback: Check PostgreSQL if not found in Redis - match Team::from_pg(pg_client, token.clone()).await { - Ok(team) => { - // Token found in PostgreSQL, update Redis cache - if let Err(e) = Team::update_redis_cache(redis_client, team).await { - tracing::warn!("Failed to update Redis cache: {}", e); - } - Ok(token) - } - Err(_) => Err(FlagError::TokenValidationError), - } - } - } - } - - pub fn extract_distinct_id(&self) -> Result { - let distinct_id = match &self.distinct_id { - None => return Err(FlagError::MissingDistinctId), - Some(id) => id, - }; - - match distinct_id.len() { - 0 => Err(FlagError::EmptyDistinctId), - 1..=200 => Ok(distinct_id.to_owned()), - _ => Ok(distinct_id.chars().take(200).collect()), - } - } -} - -#[cfg(test)] -mod tests { - use crate::api::FlagError; - use crate::v0_request::FlagRequest; - use bytes::Bytes; - use serde_json::json; - - #[test] - fn empty_distinct_id_not_accepted() { - let json = json!({ - "distinct_id": "", - "token": "my_token1", - }); - let bytes = Bytes::from(json.to_string()); - - let flag_payload = FlagRequest::from_bytes(bytes).expect("failed to parse request"); - - match flag_payload.extract_distinct_id() { - Err(FlagError::EmptyDistinctId) => (), - _ => panic!("expected empty distinct id error"), - }; - } - - #[test] - fn too_large_distinct_id_is_truncated() { - let json = json!({ - "distinct_id": std::iter::repeat("a").take(210).collect::(), - "token": "my_token1", - }); - let bytes = Bytes::from(json.to_string()); - - let flag_payload = FlagRequest::from_bytes(bytes).expect("failed to parse request"); - - assert_eq!(flag_payload.extract_distinct_id().unwrap().len(), 200); - } - - #[test] - fn distinct_id_is_returned_correctly() { - let json = json!({ - "$distinct_id": "alakazam", - "token": "my_token1", - }); - let bytes = Bytes::from(json.to_string()); - - let flag_payload = FlagRequest::from_bytes(bytes).expect("failed to parse request"); - - match flag_payload.extract_distinct_id() { - Ok(id) => assert_eq!(id, "alakazam"), - _ => panic!("expected distinct id"), - }; - } -} diff --git a/rust/feature-flags/tests/test_flag_matching_consistency.rs b/rust/feature-flags/tests/test_flag_matching_consistency.rs index d4b55ed4e9001..2a4972962019c 100644 --- a/rust/feature-flags/tests/test_flag_matching_consistency.rs +++ b/rust/feature-flags/tests/test_flag_matching_consistency.rs @@ -107,9 +107,10 @@ async fn it_is_consistent_with_rollout_calculation_for_simple_flags() { for i in 0..1000 { let distinct_id = format!("distinct_id_{}", i); - let feature_flag_match = FeatureFlagMatcher::new(distinct_id, None) + let feature_flag_match = FeatureFlagMatcher::new(distinct_id, None, None) .get_match(&flags[0]) - .await; + .await + .unwrap(); if results[i] { assert_eq!( @@ -1188,9 +1189,10 @@ async fn it_is_consistent_with_rollout_calculation_for_multivariate_flags() { for i in 0..1000 { let distinct_id = format!("distinct_id_{}", i); - let feature_flag_match = FeatureFlagMatcher::new(distinct_id, None) + let feature_flag_match = FeatureFlagMatcher::new(distinct_id, None, None) .get_match(&flags[0]) - .await; + .await + .unwrap(); if results[i].is_some() { assert_eq!( diff --git a/rust/feature-flags/tests/test_flags.rs b/rust/feature-flags/tests/test_flags.rs index 7f50064daddb6..706d8fdfed0da 100644 --- a/rust/feature-flags/tests/test_flags.rs +++ 
b/rust/feature-flags/tests/test_flags.rs @@ -7,7 +7,9 @@ use serde_json::{json, Value}; use crate::common::*; use feature_flags::config::DEFAULT_TEST_CONFIG; -use feature_flags::test_utils::{insert_new_team_in_redis, setup_redis_client}; +use feature_flags::test_utils::{ + insert_flags_for_team_in_redis, insert_new_team_in_redis, setup_redis_client, +}; pub mod common; @@ -21,6 +23,25 @@ async fn it_sends_flag_request() -> Result<()> { let team = insert_new_team_in_redis(client.clone()).await.unwrap(); let token = team.api_token; + // Insert a specific flag for the team + let flag_json = json!([{ + "id": 1, + "key": "test-flag", + "name": "Test Flag", + "active": true, + "deleted": false, + "team_id": team.id, + "filters": { + "groups": [ + { + "properties": [], + "rollout_percentage": 100 + } + ], + }, + }]); + insert_flags_for_team_in_redis(client, team.id, Some(flag_json.to_string())).await?; + let server = ServerHandle::for_config(config).await; let payload = json!({ @@ -28,20 +49,17 @@ async fn it_sends_flag_request() -> Result<()> { "distinct_id": distinct_id, "groups": {"group1": "group1"} }); + let res = server.send_flags_request(payload.to_string()).await; assert_eq!(StatusCode::OK, res.status()); - // We don't want to deserialize the data into a flagResponse struct here, - // because we want to assert the shape of the raw json data. let json_data = res.json::().await?; - assert_json_include!( actual: json_data, expected: json!({ "errorWhileComputingFlags": false, "featureFlags": { - "beta-feature": "variant-1", - "rollout-flag": true, + "test-flag": true } }) ); @@ -139,7 +157,7 @@ async fn it_rejects_missing_token() -> Result<()> { assert_eq!(StatusCode::UNAUTHORIZED, res.status()); assert_eq!( res.text().await?, - "No API key provided. Please include a valid API key in your request." + "No API token provided. Please include a valid API token in your request." 
); Ok(()) } diff --git a/rust/hook-api/Cargo.toml b/rust/hook-api/Cargo.toml index c3528d23da5d2..7887e8e49a8e2 100644 --- a/rust/hook-api/Cargo.toml +++ b/rust/hook-api/Cargo.toml @@ -22,3 +22,4 @@ tower = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } url = { workspace = true } +common-metrics = { path = "../common/metrics" } diff --git a/rust/hook-api/src/main.rs b/rust/hook-api/src/main.rs index 7ca8de09513ff..1f84abb4e4665 100644 --- a/rust/hook-api/src/main.rs +++ b/rust/hook-api/src/main.rs @@ -3,7 +3,7 @@ use config::Config; use envconfig::Envconfig; use eyre::Result; -use hook_common::metrics::setup_metrics_routes; +use common_metrics::setup_metrics_routes; use hook_common::pgqueue::PgQueue; mod config; diff --git a/rust/hook-common/Cargo.toml b/rust/hook-common/Cargo.toml index e5c27fd598245..e6b2625c23905 100644 --- a/rust/hook-common/Cargo.toml +++ b/rust/hook-common/Cargo.toml @@ -8,13 +8,10 @@ workspace = true [dependencies] async-trait = { workspace = true } -axum = { workspace = true, features = ["http2"] } chrono = { workspace = true } envconfig = { workspace = true } health = { path = "../common/health" } http = { workspace = true } -metrics = { workspace = true } -metrics-exporter-prometheus = { workspace = true } rdkafka = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } diff --git a/rust/hook-common/src/lib.rs b/rust/hook-common/src/lib.rs index 5531ceb7346de..e1446d80c338f 100644 --- a/rust/hook-common/src/lib.rs +++ b/rust/hook-common/src/lib.rs @@ -1,7 +1,6 @@ pub mod config; pub mod kafka_messages; pub mod kafka_producer; -pub mod metrics; pub mod pgqueue; pub mod retry; pub mod test; diff --git a/rust/hook-janitor/Cargo.toml b/rust/hook-janitor/Cargo.toml index 21894a38f8013..a4fa315da70f1 100644 --- a/rust/hook-janitor/Cargo.toml +++ b/rust/hook-janitor/Cargo.toml @@ -24,3 +24,4 @@ time = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } +common-metrics = { path = "../common/metrics" } \ No newline at end of file diff --git a/rust/hook-janitor/src/main.rs b/rust/hook-janitor/src/main.rs index 200e5a0030562..b7ea4db85ec1d 100644 --- a/rust/hook-janitor/src/main.rs +++ b/rust/hook-janitor/src/main.rs @@ -9,8 +9,8 @@ use std::{str::FromStr, time::Duration}; use tokio::sync::Semaphore; use webhooks::WebhookCleaner; +use common_metrics::setup_metrics_routes; use hook_common::kafka_producer::create_kafka_producer; -use hook_common::metrics::setup_metrics_routes; mod cleanup; mod config; diff --git a/rust/hook-janitor/src/webhooks.rs b/rust/hook-janitor/src/webhooks.rs index c40c7441c5b48..c523a4c59da55 100644 --- a/rust/hook-janitor/src/webhooks.rs +++ b/rust/hook-janitor/src/webhooks.rs @@ -17,10 +17,10 @@ use tracing::{debug, error, info}; use crate::cleanup::Cleaner; +use common_metrics::get_current_timestamp_seconds; use hook_common::kafka_messages::app_metrics::{AppMetric, AppMetricCategory}; use hook_common::kafka_messages::app_metrics2::{self, AppMetric2}; use hook_common::kafka_producer::KafkaContext; -use hook_common::metrics::get_current_timestamp_seconds; #[derive(Error, Debug)] pub enum WebhookCleanerError { @@ -1080,7 +1080,7 @@ mod tests { let mut conn = db.acquire().await.unwrap(); let count: i64 = sqlx::query("SELECT count(*) FROM job_queue WHERE status = $1::job_status") - .bind(&status) + .bind(status) .fetch_one(&mut *conn) .await .unwrap() @@ -1105,7 +1105,7 @@ mod tests { { // The fixtures include 
an available job, so let's complete it while the txn is open. let mut batch: PgTransactionBatch<'_, WebhookJobParameters, WebhookJobMetadata> = queue - .dequeue_tx(&"worker_id", 1) + .dequeue_tx("worker_id", 1) .await .expect("failed to dequeue job") .expect("didn't find a job to dequeue"); @@ -1130,10 +1130,10 @@ mod tests { plugin_id: 2, plugin_config_id: 3, }; - let new_job = NewJob::new(1, job_metadata, job_parameters, &"target"); + let new_job = NewJob::new(1, job_metadata, job_parameters, "target"); queue.enqueue(new_job).await.expect("failed to enqueue job"); let mut batch: PgTransactionBatch<'_, WebhookJobParameters, WebhookJobMetadata> = queue - .dequeue_tx(&"worker_id", 1) + .dequeue_tx("worker_id", 1) .await .expect("failed to dequeue job") .expect("didn't find a job to dequeue"); @@ -1158,7 +1158,7 @@ mod tests { plugin_id: 2, plugin_config_id: 3, }; - let new_job = NewJob::new(1, job_metadata, job_parameters, &"target"); + let new_job = NewJob::new(1, job_metadata, job_parameters, "target"); queue.enqueue(new_job).await.expect("failed to enqueue job"); } diff --git a/rust/hook-worker/Cargo.toml b/rust/hook-worker/Cargo.toml index d09a241206911..f7aaf59c75298 100644 --- a/rust/hook-worker/Cargo.toml +++ b/rust/hook-worker/Cargo.toml @@ -25,6 +25,8 @@ tokio = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } url = { version = "2.2" } +common-metrics = { path = "../common/metrics" } +common-dns = { path = "../common/dns" } [dev-dependencies] httpmock = { workspace = true } diff --git a/rust/hook-worker/src/error.rs b/rust/hook-worker/src/error.rs index 70877f2e1c3cc..3b12bf2897778 100644 --- a/rust/hook-worker/src/error.rs +++ b/rust/hook-worker/src/error.rs @@ -2,7 +2,7 @@ use std::error::Error; use std::fmt; use std::time; -use crate::dns::NoPublicIPv4Error; +use common_dns::NoPublicIPv4Error; use hook_common::{pgqueue, webhook::WebhookJobError}; use http::StatusCode; use thiserror::Error; diff --git a/rust/hook-worker/src/lib.rs b/rust/hook-worker/src/lib.rs index 94a07584f1da5..8488d15b20a36 100644 --- a/rust/hook-worker/src/lib.rs +++ b/rust/hook-worker/src/lib.rs @@ -1,5 +1,4 @@ pub mod config; -pub mod dns; pub mod error; pub mod util; pub mod worker; diff --git a/rust/hook-worker/src/main.rs b/rust/hook-worker/src/main.rs index 5400ff93bf6a4..798586bc6ed5e 100644 --- a/rust/hook-worker/src/main.rs +++ b/rust/hook-worker/src/main.rs @@ -2,13 +2,13 @@ use axum::routing::get; use axum::Router; use envconfig::Envconfig; +use hook_common::pgqueue::PgQueue; +use hook_common::retry::RetryPolicy; use std::future::ready; +use common_metrics::{serve, setup_metrics_routes}; use health::HealthRegistry; use hook_common::kafka_producer::create_kafka_producer; -use hook_common::{ - metrics::serve, metrics::setup_metrics_routes, pgqueue::PgQueue, retry::RetryPolicy, -}; use hook_worker::config::Config; use hook_worker::error::WorkerError; use hook_worker::worker::WebhookWorker; diff --git a/rust/hook-worker/src/worker.rs b/rust/hook-worker/src/worker.rs index f59f2dec62713..bba15cd67c9a4 100644 --- a/rust/hook-worker/src/worker.rs +++ b/rust/hook-worker/src/worker.rs @@ -23,11 +23,11 @@ use hook_common::{ webhook::{HttpMethod, WebhookJobError, WebhookJobParameters}, }; -use crate::dns::{NoPublicIPv4Error, PublicIPv4Resolver}; use crate::error::{ is_error_source, WebhookError, WebhookParseError, WebhookRequestError, WorkerError, }; use crate::util::first_n_bytes_of_response; +use common_dns::{NoPublicIPv4Error, PublicIPv4Resolver}; // TODO: 
Either make this configurable or adjust it once we don't produce results to Kafka, where // our size limit is relatively low. @@ -1026,7 +1026,7 @@ mod tests { .unwrap() .as_array() .unwrap() - .get(0) + .first() .unwrap(); first_timing .get("duration_ms") @@ -1142,7 +1142,7 @@ mod tests { .unwrap() .as_array() .unwrap() - .get(0) + .first() .unwrap(); first_timing .get("duration_ms") @@ -1255,8 +1255,7 @@ mod tests { let err = send_webhook(localhost_client(), &method, url, &headers, body.to_owned()) .await - .err() - .expect("request didn't fail when it should have failed"); + .expect_err("request didn't fail when it should have failed"); assert!(matches!(err, WebhookError::Request(..))); if let WebhookError::Request(request_error) = err { @@ -1281,8 +1280,7 @@ mod tests { let err = send_webhook(localhost_client(), &method, url, &headers, body.to_owned()) .await - .err() - .expect("request didn't fail when it should have failed"); + .expect_err("request didn't fail when it should have failed"); assert!(matches!(err, WebhookError::Request(..))); if let WebhookError::Request(request_error) = err { @@ -1309,8 +1307,7 @@ mod tests { let err = send_webhook(filtering_client, &method, url, &headers, body.to_owned()) .await - .err() - .expect("request didn't fail when it should have failed"); + .expect_err("request didn't fail when it should have failed"); assert!(matches!(err, WebhookError::Request(..))); if let WebhookError::Request(request_error) = err { diff --git a/rust/property-defs-rs/Cargo.toml b/rust/property-defs-rs/Cargo.toml new file mode 100644 index 0000000000000..177b159c9093b --- /dev/null +++ b/rust/property-defs-rs/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "property-defs-rs" +version = "0.1.0" +edition = "2021" + +[dependencies] +uuid = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +rdkafka = { workspace = true } +tokio = { workspace = true } +envconfig = {workspace = true } +tracing = { workspace = true } +tracing-subscriber = { workspace = true } +sqlx = { workspace = true } +futures = { workspace = true } +health = { path = "../common/health" } +time = { workspace = true } +axum = { workspace = true } +serve-metrics = { path = "../common/serve_metrics" } +metrics = { workspace = true } +chrono = { workspace = true } + +[lints] +workspace = true diff --git a/rust/property-defs-rs/src/app_context.rs b/rust/property-defs-rs/src/app_context.rs new file mode 100644 index 0000000000000..317ff2c355925 --- /dev/null +++ b/rust/property-defs-rs/src/app_context.rs @@ -0,0 +1,36 @@ +use std::collections::HashSet; + +use health::{HealthHandle, HealthRegistry}; +use sqlx::{postgres::PgPoolOptions, PgPool}; + +use crate::{config::Config, metrics_consts::UPDATES_ISSUED, types::Update}; + +pub struct AppContext { + pub pool: PgPool, + pub liveness: HealthRegistry, + pub worker_liveness: HealthHandle, +} + +impl AppContext { + pub async fn new(config: &Config) -> Result { + let options = PgPoolOptions::new().max_connections(config.max_pg_connections); + + let pool = options.connect(&config.database_url).await?; + + let liveness: HealthRegistry = HealthRegistry::new("liveness"); + let worker_liveness = liveness + .register("worker".to_string(), time::Duration::seconds(60)) + .await; + + Ok(Self { + pool, + liveness, + worker_liveness, + }) + } + + pub async fn issue(&self, updates: HashSet) -> Result<(), sqlx::Error> { + metrics::counter!(UPDATES_ISSUED).increment(updates.len() as u64); + Ok(()) + } +} diff --git 
a/rust/property-defs-rs/src/config.rs b/rust/property-defs-rs/src/config.rs new file mode 100644 index 0000000000000..f74b3a90886e9 --- /dev/null +++ b/rust/property-defs-rs/src/config.rs @@ -0,0 +1,66 @@ +use envconfig::Envconfig; +use rdkafka::ClientConfig; + +#[derive(Envconfig, Clone)] +pub struct Config { + #[envconfig(default = "postgres://posthog:posthog@localhost:5432/posthog")] + pub database_url: String, + + #[envconfig(default = "10")] + pub max_pg_connections: u32, + + #[envconfig(nested = true)] + pub kafka: KafkaConfig, + + #[envconfig(default = "10")] + pub max_concurrent_transactions: usize, + + #[envconfig(default = "10000")] + pub max_batch_size: usize, + + // If a worker receives a batch smaller than this, it will simply not commit the offset and + // sleep for a while, since DB ops per event scale inversely with batch size + #[envconfig(default = "1000")] + pub min_batch_size: usize, + + #[envconfig(default = "100")] + pub next_event_wait_timeout_ms: u64, + + #[envconfig(from = "BIND_HOST", default = "::")] + pub host: String, + + #[envconfig(from = "BIND_PORT", default = "3301")] + pub port: u16, +} + +#[derive(Envconfig, Clone)] +pub struct KafkaConfig { + #[envconfig(default = "kafka:9092")] + pub kafka_hosts: String, + #[envconfig(default = "clickhouse_events_json")] + pub event_topic: String, + #[envconfig(default = "false")] + pub kafka_tls: bool, + #[envconfig(default = "false")] + pub verify_ssl_certificate: bool, + #[envconfig(default = "autocomplete-rs")] + pub consumer_group: String, +} + +impl From<&KafkaConfig> for ClientConfig { + fn from(config: &KafkaConfig) -> Self { + let mut client_config = ClientConfig::new(); + client_config + .set("bootstrap.servers", &config.kafka_hosts) + .set("statistics.interval.ms", "10000") + .set("group.id", config.consumer_group.clone()); + + if config.kafka_tls { + client_config.set("security.protocol", "ssl").set( + "enable.ssl.certificate.verification", + config.verify_ssl_certificate.to_string(), + ); + }; + client_config + } +} diff --git a/rust/property-defs-rs/src/lib.rs b/rust/property-defs-rs/src/lib.rs new file mode 100644 index 0000000000000..7c639d72efa90 --- /dev/null +++ b/rust/property-defs-rs/src/lib.rs @@ -0,0 +1,4 @@ +pub mod app_context; +pub mod config; +pub mod metrics_consts; +pub mod types; diff --git a/rust/property-defs-rs/src/main.rs b/rust/property-defs-rs/src/main.rs new file mode 100644 index 0000000000000..e502daae26c9f --- /dev/null +++ b/rust/property-defs-rs/src/main.rs @@ -0,0 +1,145 @@ +use std::{collections::HashSet, sync::Arc, time::Duration}; + +use axum::{routing::get, Router}; +use envconfig::Envconfig; +use futures::future::ready; +use property_defs_rs::{ + app_context::AppContext, + config::Config, + metrics_consts::{BATCH_SKIPPED, EVENTS_RECEIVED, FORCED_SMALL_BATCH, SMALL_BATCH_SLEEP}, + types::{Event, Update}, +}; +use rdkafka::{ + consumer::{Consumer, StreamConsumer}, + message::BorrowedMessage, + ClientConfig, Message, +}; +use serve_metrics::{serve, setup_metrics_routes}; +use tokio::{select, task::JoinHandle, time::sleep}; +use tracing::{info, warn}; +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer}; +fn setup_tracing() { + let log_layer: tracing_subscriber::filter::Filtered< + tracing_subscriber::fmt::Layer<tracing_subscriber::Registry>, + EnvFilter, + tracing_subscriber::Registry, + > = tracing_subscriber::fmt::layer().with_filter(EnvFilter::from_default_env()); + tracing_subscriber::registry().with(log_layer).init(); +} + +pub async fn index() -> &'static str { "property definitions service" +} + +fn start_health_liveness_server(config: &Config, context: Arc<AppContext>) -> JoinHandle<()> { + let config = config.clone(); + let router = Router::new() + .route("/", get(index)) + .route("/_readiness", get(index)) + .route( + "/_liveness", + get(move || ready(context.liveness.get_status())), + ); + let router = setup_metrics_routes(router); + let bind = format!("{}:{}", config.host, config.port); + tokio::task::spawn(async move { + serve(router, &bind) + .await + .expect("failed to start serving metrics"); + }) +} + +#[tokio::main] +async fn main() -> Result<(), Box<dyn std::error::Error>> { + setup_tracing(); + info!("Starting up..."); + + let config = Config::init_from_env()?; + + let kafka_config: ClientConfig = (&config.kafka).into(); + + let consumer: StreamConsumer = kafka_config.create()?; + + let context = Arc::new(AppContext::new(&config).await?); + + consumer.subscribe(&[config.kafka.event_topic.as_str()])?; + + info!("Subscribed to topic: {}", config.kafka.event_topic); + + start_health_liveness_server(&config, context.clone()); + + let mut batch = Vec::with_capacity(config.max_batch_size); + + let mut sleep_count = 0; + loop { + context.worker_liveness.report_healthy().await; + + while batch.len() < config.max_batch_size { + // Try to grab from the consumer, but use a select! to time out if we'd block for more than some time + select! { + res = consumer.recv() => { + batch.push(res?); // Workers die on a kafka error + } + _ = sleep(Duration::from_millis(config.next_event_wait_timeout_ms)) => { + break; + } + } + } + + // We only process batches over a certain threshold, unless we haven't received anything in a while, to reduce DB load + if batch.len() < config.min_batch_size { + sleep_count += 1; + info!("Batch size is less than min_batch_size, sleeping for 2 seconds"); + metrics::counter!(BATCH_SKIPPED).increment(1); + sleep(Duration::from_millis(2000)).await; + if sleep_count > 10 { + warn!("Slept too many times, continuing with a small batch"); + metrics::counter!(FORCED_SMALL_BATCH).increment(1); + } else { + metrics::counter!(SMALL_BATCH_SLEEP).increment(1); + continue; + } + } + sleep_count = 0; + + metrics::counter!(EVENTS_RECEIVED).increment(batch.len() as u64); + + let updates: HashSet<Update> = batch + .drain(..) + .filter_map(message_to_event) + .flat_map(Event::into_updates) + .filter_map(filter_cached) + .collect(); + + context.issue(updates).await?; + } +} + +// This copies event properties, which means the total resident memory usage is higher than we'd like, and that constrains +// our batch size. serde_json provides no zero-copy way to parse a JSON object, so we're stuck with this for now. +fn message_to_event(msg: BorrowedMessage) -> Option<Event> { + let Some(payload) = msg.payload() else { + warn!("Received empty event"); + metrics::counter!("empty_event").increment(1); + return None; + }; + + let event = serde_json::from_slice::<Event>(payload); + let event = match event { + Ok(e) => e, + Err(e) => { + metrics::counter!("event_parse_error").increment(1); + warn!("Failed to parse event: {:?}", e); + return None; + } + }; + Some(event) +} + +// TODO: this is where caching would go, if we had any. Could probably use a bloom filter or something, +// rather than storing the entire update in memory, if we wanted to store some HUGE number of updates and +// be /really/ good about not hitting the DB when we don't need to. Right now this is just a no-op.
+fn filter_cached(update: Update) -> Option<Update> { + Some(update) +} diff --git a/rust/property-defs-rs/src/metrics_consts.rs b/rust/property-defs-rs/src/metrics_consts.rs new file mode 100644 index 0000000000000..5cb4ba0091f61 --- /dev/null +++ b/rust/property-defs-rs/src/metrics_consts.rs @@ -0,0 +1,6 @@ +pub const UPDATES_ISSUED: &str = "prop_defs_issued_updates"; +pub const BATCH_SKIPPED: &str = "prop_defs_batch_skipped"; +pub const EVENTS_RECEIVED: &str = "prop_defs_events_received"; +pub const EVENTS_SKIPPED: &str = "prop_defs_events_skipped"; +pub const FORCED_SMALL_BATCH: &str = "prop_defs_forced_small_batch"; +pub const SMALL_BATCH_SLEEP: &str = "prop_defs_small_batch_sleep"; diff --git a/rust/property-defs-rs/src/types.rs b/rust/property-defs-rs/src/types.rs new file mode 100644 index 0000000000000..6312fc564d849 --- /dev/null +++ b/rust/property-defs-rs/src/types.rs @@ -0,0 +1,346 @@ +use std::{fmt, hash::Hash, str::FromStr}; + +use chrono::{DateTime, Duration, DurationRound, RoundingError, Utc}; +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Value}; +use tracing::warn; +use uuid::Uuid; + +use crate::metrics_consts::EVENTS_SKIPPED; + +pub const SKIP_PROPERTIES: [&str; 9] = [ + "$set", + "$set_once", + "$unset", + "$group_0", + "$group_1", + "$group_2", + "$group_3", + "$group_4", + "$groups", +]; + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub enum PropertyParentType { + Event = 1, + Person = 2, + Group = 3, + Session = 4, +} + +impl From<PropertyParentType> for i32 { + fn from(parent_type: PropertyParentType) -> i32 { + match parent_type { + PropertyParentType::Event => 1, + PropertyParentType::Person => 2, + PropertyParentType::Group => 3, + PropertyParentType::Session => 4, + } + } +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +pub enum PropertyValueType { + DateTime, + String, + Numeric, + Boolean, + Duration, +} + +impl fmt::Display for PropertyValueType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + PropertyValueType::DateTime => write!(f, "DateTime"), + PropertyValueType::String => write!(f, "String"), + PropertyValueType::Numeric => write!(f, "Numeric"), + PropertyValueType::Boolean => write!(f, "Boolean"), + PropertyValueType::Duration => write!(f, "Duration"), + } + } +} + +// The grouptypemapping table uses i32's, but we get group types by name, so we have to resolve them before DB writes, sigh +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum GroupType { + Unresolved(String), + Resolved(String, i32), +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct PropertyDefinition { + pub id: Uuid, + pub team_id: i32, + pub name: String, + pub is_numerical: bool, + pub property_type: Option<PropertyValueType>, + pub event_type: Option<PropertyParentType>, + pub group_type_index: Option<GroupType>, + pub property_type_format: Option<String>, // Deprecated + pub volume_30_day: Option<i64>, // Deprecated + pub query_usage_30_day: Option<i64>, // Deprecated +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct EventDefinition { + pub id: Uuid, + pub name: String, + pub team_id: i32, + pub last_seen_at: DateTime<Utc>, +} + +// Derived hash since these are keyed on all fields in the DB +#[derive(Clone, Debug, Hash, Eq, PartialEq)] +pub struct EventProperty { + team_id: i32, + event: String, + property: String, +} + +// Represents a generic update, but comparable, allowing us to dedupe and cache updates +#[derive(Clone, Debug, Hash, Eq, PartialEq)] +pub enum Update { + Event(EventDefinition), + Property(PropertyDefinition), + EventProperty(EventProperty), +} + +#[derive(Clone, Debug, Deserialize)]
+pub struct Event { + pub team_id: i32, + pub event: String, + pub properties: Option<String>, +} + +impl From<&Event> for EventDefinition { + fn from(event: &Event) -> Self { + EventDefinition { + id: Uuid::now_v7(), + name: sanitize_event_name(&event.event), + team_id: event.team_id, + // We round last seen to the nearest day, as per the TS impl. Unwrap is safe here because we know + // the duration is positive, non-zero, and smaller than the time since epoch + last_seen_at: floor_datetime(Utc::now(), Duration::days(1)).unwrap(), + } + } +} + +impl Event { + pub fn into_updates(self) -> Vec<Update> { + let team_id = self.team_id; + let event = self.event.clone(); + + let updates = self.into_updates_inner(); + if updates.len() > 10_000 { + warn!( + "Event {} for team {} has more than 10,000 properties, skipping", + event, team_id + ); + metrics::counter!(EVENTS_SKIPPED).increment(1); + return vec![]; + } + + updates + } + + fn into_updates_inner(self) -> Vec<Update> { + let mut updates = vec![Update::Event(EventDefinition::from(&self))]; + let Some(props) = &self.properties else { + return updates; + }; + + let Ok(props) = Value::from_str(props) else { + return updates; + }; + + let Value::Object(props) = props else { + return updates; + }; + + // If this is a groupidentify event, we ONLY bubble up the group properties + if self.event == "$groupidentify" { + let Some(Value::String(group_type)) = props.get("$group_type") else { + return updates; + }; + let group_type = GroupType::Unresolved(group_type.clone()); + + let Some(group_properties) = props.get("$group_set") else { + return updates; + }; + + let Value::Object(group_properties) = group_properties else { + return updates; + }; + + self.get_props_from_object( + &mut updates, + group_properties, + PropertyParentType::Group, + Some(group_type), + ); + return updates; + } + + // Grab the "ordinary" (non-person) event properties + self.get_props_from_object(&mut updates, &props, PropertyParentType::Event, None); + + // If there are any person properties, also push those into the flat property map.
+ if let Some(Value::Object(set_props)) = props.get("$set") { + self.get_props_from_object(&mut updates, set_props, PropertyParentType::Person, None) + } + if let Some(Value::Object(set_once_props)) = props.get("$set_once") { + self.get_props_from_object( + &mut updates, + set_once_props, + PropertyParentType::Person, + None, + ) + } + + updates + } + + fn get_props_from_object( + &self, + updates: &mut Vec<Update>, + set: &Map<String, Value>, + parent_type: PropertyParentType, + group_type: Option<GroupType>, + ) { + updates.reserve(set.len() * 2); + for (key, value) in set { + if SKIP_PROPERTIES.contains(&key.as_str()) && parent_type == PropertyParentType::Event { + continue; + } + + updates.push(Update::EventProperty(EventProperty { + team_id: self.team_id, + event: self.event.clone(), + property: key.clone(), + })); + + let property_type = detect_property_type(key, value); + let is_numerical = matches!(property_type, Some(PropertyValueType::Numeric)); + + let def = PropertyDefinition { + id: Uuid::now_v7(), + team_id: self.team_id, + name: key.clone(), + is_numerical, + property_type, + event_type: Some(parent_type), + group_type_index: group_type.clone(), + property_type_format: None, + volume_30_day: None, + query_usage_30_day: None, + }; + updates.push(Update::Property(def)); + } + } +} + +fn detect_property_type(key: &str, value: &Value) -> Option<PropertyValueType> { + // There are a whole set of special cases here, taken from the TS + if key.starts_with("utm_") { + // utm_ prefixed properties should always be detected as strings. + // Sometimes the first value sent looks like a number, even though + // subsequent values are not. See + // https://github.com/PostHog/posthog/issues/12529 for more context. + return Some(PropertyValueType::String); + } + if key.starts_with("$feature/") { + // $feature/ prefixed properties should always be detected as strings. + // These are feature flag values, and can be boolean or string. + // Sometimes the first value sent is boolean (because flag isn't enabled) while + // subsequent values are not. We don't want this to be misunderstood as a boolean. + return Some(PropertyValueType::String); + } + + if key == "$feature_flag_response" { + // $feature_flag_response properties should always be detected as strings. + // These are feature flag values, and can be boolean or string. + // Sometimes the first value sent is boolean (because flag isn't enabled) while + // subsequent values are not. We don't want this to be misunderstood as a boolean. + return Some(PropertyValueType::String); + } + + if key.starts_with("$survey_response") { + // NB: $survey_responses are collected in an interesting way, where the first + // response is called `$survey_response` and subsequent responses are called + // `$survey_response_2`, `$survey_response_3`, etc. So, this check should auto-cast + // all survey responses to strings, and $survey_response properties should always be detected as strings. + return Some(PropertyValueType::String); + } + + match value { + Value::String(s) => { + let s = &s.trim().to_lowercase(); + if s == "true" || s == "false" { + Some(PropertyValueType::Boolean) + } else { + // TODO - we should try to auto-detect datetime strings here, but I'm skipping the chunk of regex necessary to do it for v0 + Some(PropertyValueType::String) + } + } + Value::Number(_) => { + // TODO - this is a divergence from the TS impl - the TS also checks if the contained number is + // "likely" to be a unix timestamp on the basis of the number of characters.
I have mixed feelings about this, + // so I'm going to leave it as just checking the key for now. This means we're being /less/ strict with datetime + // detection here than in the TS + if key.to_lowercase().contains("timestamp") || key.to_lowercase().contains("time") { + Some(PropertyValueType::DateTime) + } else { + Some(PropertyValueType::Numeric) + } + } + Value::Bool(_) => Some(PropertyValueType::Boolean), + _ => None, + } +} + +fn sanitize_event_name(event_name: &str) -> String { + event_name.replace('\u{0000}', "\u{FFFD}") +} + +// These hash impls correspond to DB uniqueness constraints, pulled from the TS + +impl Hash for PropertyDefinition { + fn hash<H: std::hash::Hasher>(&self, state: &mut H) { + self.team_id.hash(state); + self.name.hash(state); + self.event_type.hash(state); + self.group_type_index.hash(state); + } +} + +impl Hash for EventDefinition { + fn hash<H: std::hash::Hasher>(&self, state: &mut H) { + self.team_id.hash(state); + self.name.hash(state); + self.last_seen_at.hash(state) + } +} + +// Ensure group type hashes identically regardless of whether it's resolved or not. Note that if +// someone changes the name associated with a group type, all subsequent events will hash differently +// because of this, but that seems fine - it just means a few extra DB ops issued; we index on the i32 +// at write time anyway +impl Hash for GroupType { + fn hash<H: std::hash::Hasher>(&self, state: &mut H) { + match self { + GroupType::Unresolved(name) => name.hash(state), + GroupType::Resolved(name, _) => name.hash(state), + } + } +} + +fn floor_datetime(dt: DateTime<Utc>, duration: Duration) -> Result<DateTime<Utc>, RoundingError> { + let rounded = dt.duration_round(duration)?; + + // If we rounded up + if rounded > dt { + Ok(rounded - duration) + } else { + Ok(rounded) + } +}
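To make the classification rules in detect_property_type concrete, the following standalone sketch shows the outputs one would expect for a few representative keys. It is not part of the diff and assumes the types.rs items above are in scope.

use serde_json::json;

fn main() {
    // utm_ and $feature/ keys are forced to String regardless of the value sent
    assert_eq!(detect_property_type("utm_source", &json!(123)), Some(PropertyValueType::String));
    assert_eq!(detect_property_type("$feature/my-flag", &json!(true)), Some(PropertyValueType::String));
    // Numbers are Numeric unless the key hints at a timestamp
    assert_eq!(detect_property_type("signup_count", &json!(7)), Some(PropertyValueType::Numeric));
    assert_eq!(detect_property_type("last_login_timestamp", &json!(1_700_000_000)), Some(PropertyValueType::DateTime));
    // "true"/"false" strings are treated as Boolean
    assert_eq!(detect_property_type("is_admin", &json!("TRUE")), Some(PropertyValueType::Boolean));
}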
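Similarly, a quick sketch of how floor_datetime buckets last_seen_at to the start of the day, again assuming the function above is in scope: duration_round rounds to the nearest boundary, so an afternoon timestamp rounds up and the rounded > dt branch pulls it back down a day.

use chrono::{Duration, TimeZone, Utc};

fn main() {
    let dt = Utc.with_ymd_and_hms(2024, 8, 7, 13, 45, 12).unwrap();
    // 13:45 would round up to 2024-08-08T00:00; subtracting a day floors it to midnight
    let floored = floor_datetime(dt, Duration::days(1)).unwrap();
    assert_eq!(floored, Utc.with_ymd_and_hms(2024, 8, 7, 0, 0, 0).unwrap());
}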
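Finally, the no-op filter_cached in main.rs leaves room for the cache described in its TODO. A bounded in-memory version might look roughly like the sketch below; the UpdateCache type and its eviction policy are hypothetical and not part of this change. It keys on the manual Hash impls above, which cover the DB-uniqueness fields rather than the freshly generated UUIDs, at the cost of occasionally dropping an update on a hash collision.

use std::collections::{hash_map::DefaultHasher, HashSet};
use std::hash::{Hash, Hasher};

// Hypothetical bounded dedupe cache; a bloom filter or LRU would be a natural upgrade.
struct UpdateCache {
    seen: HashSet<u64>,
    capacity: usize,
}

impl UpdateCache {
    fn new(capacity: usize) -> Self {
        Self { seen: HashSet::with_capacity(capacity), capacity }
    }

    fn filter_cached(&mut self, update: Update) -> Option<Update> {
        // Hash with the same impls the HashSet dedupe uses, ignoring the generated id
        let mut hasher = DefaultHasher::new();
        update.hash(&mut hasher);
        let key = hasher.finish();
        if self.seen.contains(&key) {
            return None; // already issued recently, skip the DB write
        }
        if self.seen.len() >= self.capacity {
            self.seen.clear(); // crude eviction to bound memory
        }
        self.seen.insert(key);
        Some(update)
    }
}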