diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml index 48a0454bd0f33f..410d4deb18461b 100644 --- a/.github/workflows/ci-backend.yml +++ b/.github/workflows/ci-backend.yml @@ -91,7 +91,6 @@ jobs: backend-code-quality: needs: changes - # if: needs.changes.outputs.backend == 'true' # <- Can't add this here because it's a required check timeout-minutes: 30 name: Python code quality checks @@ -106,12 +105,10 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - uses: actions/checkout@v3 - if: needs.changes.outputs.backend == 'true' with: fetch-depth: 1 - name: Set up Python - if: needs.changes.outputs.backend == 'true' uses: actions/setup-python@v5 with: python-version: 3.11.9 @@ -121,40 +118,32 @@ jobs: # uv is a fast pip alternative: https://github.com/astral-sh/uv/ - run: pip install uv - if: needs.changes.outputs.backend == 'true' - name: Install SAML (python3-saml) dependencies - if: needs.changes.outputs.backend == 'true' run: | sudo apt-get update sudo apt-get install libxml2-dev libxmlsec1 libxmlsec1-dev libxmlsec1-openssl - name: Install Python dependencies - if: needs.changes.outputs.backend == 'true' run: | uv pip install --system -r requirements.txt -r requirements-dev.txt - name: Check for syntax errors, import sort, and code style violations - if: needs.changes.outputs.backend == 'true' run: | ruff check . - name: Check formatting - if: needs.changes.outputs.backend == 'true' run: | ruff format --check --diff . - name: Add Problem Matcher - if: needs.changes.outputs.backend == 'true' run: echo "::add-matcher::.github/mypy-problem-matcher.json" - name: Check static typing - if: needs.changes.outputs.backend == 'true' run: | mypy -p posthog | mypy-baseline filter - name: Check if "schema.py" is up to date - if: needs.changes.outputs.backend == 'true' run: | npm run schema:build:python && git diff --exit-code @@ -235,9 +224,8 @@ jobs: django: needs: changes - # if: needs.changes.outputs.backend == 'true' # <- Can't add this here because it's a required check - - timeout-minutes: 30 # increase for tmate testing + # increase for tmate testing + timeout-minutes: 30 name: Django tests – ${{ matrix.segment }} (persons-on-events ${{ matrix.person-on-events && 'on' || 'off' }}), Py ${{ matrix.python-version }}, ${{ matrix.clickhouse-server-image }} (${{matrix.group}}/${{ matrix.concurrency }}) runs-on: ubuntu-24.04 @@ -273,8 +261,10 @@ jobs: group: 3 steps: + # The first step is the only one that should run if `needs.changes.outputs.backend == 'false'`. + # All the other ones should rely on `needs.changes.outputs.backend` directly or indirectly, so that they're + # effectively skipped if backend code is unchanged. See https://github.com/PostHog/posthog/pull/15174. 
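The pattern the comment above describes, sketched as a minimal standalone job (job and step names here are illustrative, not taken from this diff): the job carries no job-level `if:` so the required check always reports a status, the first step runs unconditionally, and every later step is guarded by the `changes` output.

```yaml
jobs:
  backend-tests:
    needs: changes
    # No job-level `if:` - as a required check, this job must always report
    # a status instead of being skipped outright.
    steps:
      # The first step always runs, so the job still succeeds when the rest is skipped.
      - uses: actions/checkout@v3
      # Every other step is skipped when backend code is unchanged.
      - name: Run backend tests
        if: needs.changes.outputs.backend == 'true'
        run: pytest
```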
- uses: actions/checkout@v3 - if: needs.changes.outputs.backend == 'true' with: fetch-depth: 1 repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -317,7 +307,7 @@ jobs: fi - name: Fail CI if some snapshots have been updated but not committed - if: needs.changes.outputs.backend == 'true' && steps.changed-files.outputs.files_found == 'true' && steps.add-and-commit.outcome == 'success' + if: steps.changed-files.outputs.files_found == 'true' && steps.add-and-commit.outcome == 'success' run: | echo "${{ steps.changed-files.outputs.diff }}" exit 1 @@ -333,11 +323,11 @@ jobs: async-migrations: name: Async migrations tests - ${{ matrix.clickhouse-server-image }} needs: changes - if: needs.changes.outputs.backend == 'true' strategy: fail-fast: false matrix: clickhouse-server-image: ['clickhouse/clickhouse-server:24.8.7.41'] + if: needs.changes.outputs.backend == 'true' runs-on: ubuntu-24.04 steps: - name: 'Checkout repo' @@ -391,7 +381,7 @@ jobs: calculate-running-time: name: Calculate running time - needs: [changes, django, async-migrations] + needs: [django, async-migrations] runs-on: ubuntu-24.04 if: # Run on pull requests to PostHog/posthog + on PostHog/posthog outside of PRs - but never on forks needs.changes.outputs.backend == 'true' && diff --git a/.github/workflows/ci-e2e.yml b/.github/workflows/ci-e2e.yml index ada720bb488628..4ec28981e5f1db 100644 --- a/.github/workflows/ci-e2e.yml +++ b/.github/workflows/ci-e2e.yml @@ -68,7 +68,6 @@ jobs: container: name: Build and cache container image runs-on: ubuntu-24.04 - if: needs.changes.outputs.shouldTriggerCypress == 'true' timeout-minutes: 60 needs: [changes] permissions: @@ -79,9 +78,12 @@ jobs: build-id: ${{ steps.build.outputs.build-id }} steps: - name: Checkout + if: needs.changes.outputs.shouldTriggerCypress == 'true' uses: actions/checkout@v3 - name: Build the Docker image with Depot - uses: ./.github/actions/build-n-cache-image # Build the container image in preparation for the E2E tests + if: needs.changes.outputs.shouldTriggerCypress == 'true' + # Build the container image in preparation for the E2E tests + uses: ./.github/actions/build-n-cache-image id: build with: save: true @@ -166,8 +168,7 @@ jobs: build-id: ${{ needs.container.outputs.build-id }} tags: ${{ needs.container.outputs.tag }} - - name: Write .env - if: needs.changes.outputs.shouldTriggerCypress == 'true' + - name: Write .env # This step intentionally has no if, so that GH always considers the action as having run run: | cat <<EOT >> .env SECRET_KEY=6b01eee4f945ca25045b5aab440b953461faf08693a9abbf1166dc7c6b9772da @@ -279,7 +280,7 @@ jobs: calculate-running-time: name: Calculate running time runs-on: ubuntu-24.04 - needs: [changes, cypress] + needs: [cypress] if: needs.changes.outputs.shouldTriggerCypress == 'true' && github.event.pull_request.head.repo.full_name == 'PostHog/posthog' steps: diff --git a/.github/workflows/ci-frontend.yml b/.github/workflows/ci-frontend.yml index eedc30ca910109..f59c7e8eef7908 100644 --- a/.github/workflows/ci-frontend.yml +++ b/.github/workflows/ci-frontend.yml @@ -52,13 +52,12 @@ jobs: frontend-code-quality: name: Code quality checks - # if: needs.changes.outputs.frontend == 'true' # <- Can't add this here because it's a required check needs: changes # kea typegen and typescript:check need some more oomph runs-on: ubuntu-24.04 steps: + # we need at least one thing to run to make sure we include everything for required jobs - uses: actions/checkout@v3 - if: needs.changes.outputs.frontend == 'true' - name: Install pnpm if:
needs.changes.outputs.frontend == 'true' @@ -124,7 +123,6 @@ jobs: jest: runs-on: ubuntu-24.04 needs: changes - # if: needs.changes.outputs.frontend == 'true' # <- Can't add this here because it's a required check name: Jest test (${{ matrix.segment }} - ${{ matrix.chunk }}) strategy: @@ -135,8 +133,8 @@ jobs: chunk: [1, 2, 3] steps: + # we need at least one thing to run to make sure we include everything for required jobs - uses: actions/checkout@v3 - if: needs.changes.outputs.frontend == 'true' - name: Remove ee if: needs.changes.outputs.frontend == 'true' && matrix.segment == 'FOSS' diff --git a/.github/workflows/ci-hog.yml b/.github/workflows/ci-hog.yml index 2d9f67dc27ebbb..69a3bc3d5f1338 100644 --- a/.github/workflows/ci-hog.yml +++ b/.github/workflows/ci-hog.yml @@ -11,12 +11,10 @@ on: paths-ignore: - rust/** - livestream/** - - .github/** pull_request: paths-ignore: - rust/** - livestream/** - - .github/** jobs: # Job to decide if we should run backend ci @@ -49,7 +47,6 @@ jobs: hog-tests: needs: changes - if: needs.changes.outputs.hog == 'true' timeout-minutes: 30 name: Hog tests @@ -59,15 +56,17 @@ jobs: # If this run wasn't initiated by the bot (meaning: snapshot update) and we've determined # there are backend changes, cancel previous runs - uses: n1hility/cancel-previous-runs@v3 - if: github.actor != 'posthog-bot' + if: github.actor != 'posthog-bot' && needs.changes.outputs.hog == 'true' with: token: ${{ secrets.GITHUB_TOKEN }} - uses: actions/checkout@v3 + if: needs.changes.outputs.hog == 'true' with: fetch-depth: 1 - name: Set up Python + if: needs.changes.outputs.hog == 'true' uses: actions/setup-python@v5 with: python-version: 3.11.9 @@ -77,25 +76,31 @@ jobs: # uv is a fast pip alternative: https://github.com/astral-sh/uv/ - run: pip install uv + if: needs.changes.outputs.hog == 'true' - name: Install SAML (python3-saml) dependencies + if: needs.changes.outputs.hog == 'true' run: | sudo apt-get update sudo apt-get install libxml2-dev libxmlsec1 libxmlsec1-dev libxmlsec1-openssl - name: Install Python dependencies + if: needs.changes.outputs.hog == 'true' run: | uv pip install --system -r requirements.txt -r requirements-dev.txt - name: Install pnpm + if: needs.changes.outputs.hog == 'true' uses: pnpm/action-setup@v4 - name: Set up Node.js + if: needs.changes.outputs.hog == 'true' uses: actions/setup-node@v4 with: node-version: 18.12.1 - name: Check if ANTLR definitions are up to date + if: needs.changes.outputs.hog == 'true' run: | cd .. 
sudo apt-get install default-jre @@ -118,21 +123,25 @@ ANTLR_VERSION: '4.13.2' - name: Check if STL bytecode is up to date + if: needs.changes.outputs.hog == 'true' run: | python -m hogvm.stl.compile git diff --exit-code - name: Run HogVM Python tests + if: needs.changes.outputs.hog == 'true' run: | pytest hogvm - name: Run HogVM TypeScript tests + if: needs.changes.outputs.hog == 'true' run: | cd hogvm/typescript pnpm install --frozen-lockfile pnpm run test - name: Run Hog tests + if: needs.changes.outputs.hog == 'true' run: | cd hogvm/typescript pnpm run build diff --git a/.github/workflows/ci-plugin-server.yml b/.github/workflows/ci-plugin-server.yml index 814550ddf9d804..30ca845cd89b69 100644 --- a/.github/workflows/ci-plugin-server.yml +++ b/.github/workflows/ci-plugin-server.yml @@ -81,7 +81,6 @@ jobs: tests: name: Plugin Server Tests (${{matrix.shard}}) needs: changes - # if: needs.changes.outputs.plugin-server == 'true' # <- Can't add this here because it's a required check runs-on: ubuntu-24.04 strategy: @@ -97,6 +96,7 @@ steps: - name: Code check out # NOTE: We need this check on every step so that it still runs if skipped, as we need it to succeed for the CI if: needs.changes.outputs.plugin-server == 'true' uses: actions/checkout@v3 @@ -121,14 +121,12 @@ # uv is a fast pip alternative: https://github.com/astral-sh/uv/ - run: pip install uv - if: needs.changes.outputs.plugin-server == 'true' - name: Install rust if: needs.changes.outputs.plugin-server == 'true' uses: dtolnay/rust-toolchain@1.82 - uses: actions/cache@v4 - if: needs.changes.outputs.plugin-server == 'true' with: path: | ~/.cargo/registry @@ -195,7 +193,6 @@ functional-tests: name: Functional tests needs: changes - # if: needs.changes.outputs.plugin-server == 'true' # <- Can't add this here because it's a required check runs-on: ubuntu-24.04 env: @@ -233,7 +230,6 @@ # uv is a fast pip alternative: https://github.com/astral-sh/uv/ - run: pip install uv - if: needs.changes.outputs.plugin-server == 'true' - name: Install SAML (python3-saml) dependencies if: needs.changes.outputs.plugin-server == 'true' diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 7f1aef10ba0d70..56f1ed3bf0330b 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -16,12 +16,10 @@ on: branches: ['master'] paths-ignore: - 'rust/**' - - '.github/**' pull_request: branches: ['master'] paths-ignore: - 'rust/**' - - '.github/**' schedule: - cron: '27 1 * * 0' diff --git a/.github/workflows/container-images-ci.yml b/.github/workflows/container-images-ci.yml index f696fe671bfc5d..7b434a7cb546de 100644 --- a/.github/workflows/container-images-ci.yml +++ b/.github/workflows/container-images-ci.yml @@ -5,7 +5,6 @@ on: paths-ignore: - 'rust/**' - 'livestream/**' - - '.github/**' concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} diff --git a/.vscode/launch.json b/.vscode/launch.json index 3aa83e0bb4e0d4..9eb3fe62780f29 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -106,6 +106,30 @@ "group": "main" } }, + { + "name": "Celery Beat", + "consoleName": "Celery Beat", + "type": "debugpy", + "justMyCode": true, + "autoReload": { + "enable": true, + "include": ["posthog/**/*.py"] + }, + "request": "launch", + "program": "${workspaceFolder}/manage.py", + "args": ["run_autoreload_celery", "--type=beat"], + "console": "integratedTerminal", + "cwd": "${workspaceFolder}", + "env": { + "SKIP_ASYNC_MIGRATIONS_SETUP": "1", + "DEBUG": "1", +
"BILLING_SERVICE_URL": "https://billing.dev.posthog.dev", + "SKIP_SERVICE_VERSION_REQUIREMENTS": "1" + }, + "presentation": { + "group": "main" + } + }, { "name": "Plugin Server", "command": "npm run start:dev", @@ -200,6 +224,27 @@ "console": "integratedTerminal", "internalConsoleOptions": "neverOpen", "runtimeExecutable": "${env:HOME}/.nvm/versions/node/${input:pickVersion}/bin/node" + }, + { + "name": "Python Debugger: Django Migrations", + "type": "debugpy", + "request": "launch", + "args": ["migrate", "posthog"], + "django": true, + "env": { + "PYTHONUNBUFFERED": "1", + "DJANGO_SETTINGS_MODULE": "posthog.settings", + "DEBUG": "1", + "CLICKHOUSE_SECURE": "False", + "KAFKA_HOSTS": "localhost", + "DATABASE_URL": "postgres://posthog:posthog@localhost:5432/posthog", + "SKIP_SERVICE_VERSION_REQUIREMENTS": "1", + "PRINT_SQL": "1", + "BILLING_SERVICE_URL": "http://localhost:8100/", + "CLOUD_DEPLOYMENT": "dev" + }, + "autoStartBrowser": false, + "program": "${workspaceFolder}/manage.py" } ], "inputs": [ @@ -212,7 +257,14 @@ "compounds": [ { "name": "PostHog", - "configurations": ["Backend", "Celery Threaded Pool", "Frontend", "Plugin Server", "Temporal Worker"], + "configurations": [ + "Backend", + "Celery Threaded Pool", + "Celery Beat", + "Frontend", + "Plugin Server", + "Temporal Worker" + ], "stopAll": true, "presentation": { "order": 1, diff --git a/cypress/README.md b/cypress/README.md index 0930a0ad7bcbf3..f043f6f0546b42 100644 --- a/cypress/README.md +++ b/cypress/README.md @@ -2,17 +2,13 @@ The Cypress tests run with a PostHog instance that has no feature flags set up. -To test feature flags you can intercept the call to the `decide` endpoint +To test feature flags you can intercept the call to the `decide` endpoint using this helper ```javascript // sometimes the system under test calls `/decide` // and sometimes it calls https://app.posthog.com/decide -cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - // add feature flags here, for e.g. - // 'feature-flag-key': true, - }) - ) -) +setupFeatureFlags({ + // add feature flags here, for e.g. 
+ 'feature-flag-key': true, +}) ``` diff --git a/cypress/e2e/alerts.cy.ts b/cypress/e2e/alerts.cy.ts index 82bd6bc10f4fbe..91ecad1d244893 100644 --- a/cypress/e2e/alerts.cy.ts +++ b/cypress/e2e/alerts.cy.ts @@ -1,15 +1,11 @@ -import { decideResponse } from '../fixtures/api/decide' import { createInsight, createInsightWithBreakdown } from '../productAnalytics' +import { setupFeatureFlags } from '../support/decide' describe('Alerts', () => { beforeEach(() => { - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - alerts: true, - }) - ) - ) + setupFeatureFlags({ + alerts: true, + }) createInsight('insight') }) diff --git a/cypress/e2e/experiments.cy.ts b/cypress/e2e/experiments.cy.ts index 5a7d92c3f49c1e..a635cf7841cad0 100644 --- a/cypress/e2e/experiments.cy.ts +++ b/cypress/e2e/experiments.cy.ts @@ -1,4 +1,4 @@ -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' describe('Experiments', () => { let randomNum @@ -47,13 +47,10 @@ describe('Experiments', () => { }) const createExperimentInNewUi = (): void => { - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - 'new-experiments-ui': true, - }) - ) - ) + setupFeatureFlags({ + 'new-experiments-ui': true, + }) + cy.visit('/experiments') // Name, flag key, description diff --git a/cypress/e2e/exports.cy.ts b/cypress/e2e/exports.cy.ts index 7e96b0c56d454a..8131a984256026 100644 --- a/cypress/e2e/exports.cy.ts +++ b/cypress/e2e/exports.cy.ts @@ -1,18 +1,14 @@ import { urls } from 'scenes/urls' -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' // NOTE: As the API data is randomly generated, we are only really testing here that the overall output is correct // The actual graph is not under test describe('Exporting Insights', () => { beforeEach(() => { - cy.intercept('https://us.i.posthog.com/decide/*', (req) => - req.reply( - decideResponse({ - 'export-dashboard-insights': true, - }) - ) - ) + setupFeatureFlags({ + 'export-dashboard-insights': true, + }) cy.visit(urls.insightNew()) // apply filter cy.get('[data-attr$=add-filter-group]').click() diff --git a/cypress/e2e/featureFlags.cy.ts b/cypress/e2e/featureFlags.cy.ts index 2dceb97af6b21a..df4d740b8ec4bb 100644 --- a/cypress/e2e/featureFlags.cy.ts +++ b/cypress/e2e/featureFlags.cy.ts @@ -1,10 +1,10 @@ -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' describe('Feature Flags', () => { let name beforeEach(() => { - cy.intercept('**/decide/*', (req) => req.reply(decideResponse({}))) + setupFeatureFlags({}) cy.intercept('/api/projects/*/property_definitions?type=person*', { fixture: 'api/feature-flags/property_definition', diff --git a/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts b/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts index 85f472a09c97be..a902e861bacd6e 100644 --- a/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts +++ b/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts @@ -56,6 +56,9 @@ describe('Insights', () => { it('can open a new stickiness insight', () => { insight.clickTab('STICKINESS') + // this test flaps, so check for a parent element, that is present even when failing + // in the hope that it slows the test down a little and stops it flapping + cy.get('.InsightVizDisplay--type-stickiness').should('exist') cy.get('.TrendsInsight canvas').should('exist') }) diff --git a/cypress/e2e/onboarding.cy.ts 
b/cypress/e2e/onboarding.cy.ts index b9453689a12aa7..3ffd5ccc4bc275 100644 --- a/cypress/e2e/onboarding.cy.ts +++ b/cypress/e2e/onboarding.cy.ts @@ -1,16 +1,11 @@ -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' describe('Onboarding', () => { beforeEach(() => { cy.intercept('/api/billing/', { fixture: 'api/billing/billing-unsubscribed.json' }) - - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - 'product-intro-pages': 'test', - }) - ) - ) + setupFeatureFlags({ + 'product-intro-pages': 'test', + }) }) it('Navigate between /products to /onboarding to a product intro page', () => { diff --git a/cypress/e2e/signup.cy.ts b/cypress/e2e/signup.cy.ts index 9774236ef81c46..76d7a694d8c50c 100644 --- a/cypress/e2e/signup.cy.ts +++ b/cypress/e2e/signup.cy.ts @@ -1,4 +1,4 @@ -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' const VALID_PASSWORD = 'hedgE-hog-123%' @@ -171,13 +171,9 @@ describe('Signup', () => { }) it('Shows redirect notice if redirecting for maintenance', () => { - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - 'redirect-signups-to-instance': 'us', - }) - ) - ) + setupFeatureFlags({ + 'redirect-signups-to-instance': 'us', + }) cy.visit('/logout') cy.location('pathname').should('include', '/login') diff --git a/cypress/fixtures/api/decide.js b/cypress/fixtures/api/decide.js index 102f1211152c16..7c03b11c6dc48a 100644 --- a/cypress/fixtures/api/decide.js +++ b/cypress/fixtures/api/decide.js @@ -8,6 +8,7 @@ export function decideResponse(featureFlags) { }, isAuthenticated: true, supportedCompression: ['gzip', 'gzip-js', 'lz64'], + hasFeatureFlags: Object.keys(featureFlags).length > 0, featureFlags, sessionRecording: { endpoint: '/s/', diff --git a/cypress/support/decide.ts b/cypress/support/decide.ts new file mode 100644 index 00000000000000..a32e192f74d25c --- /dev/null +++ b/cypress/support/decide.ts @@ -0,0 +1,28 @@ +import { decideResponse } from '../fixtures/api/decide' + +export const setupFeatureFlags = (overrides: Record<string, any> = {}): void => { + // Tricky - the new RemoteConfig endpoint is optimised to not load decide if there are no feature flags in the DB. + // We need to intercept both the RemoteConfig and the decide endpoint to ensure that the feature flags are always loaded.
+ + cy.intercept('**/array/*/config', (req) => + req.reply( + decideResponse({ + ...overrides, + }) + ) + ) + + cy.intercept('**/array/*/config.js', (req) => + req.continue((res) => { + res.send(res.body) + }) + ) + + cy.intercept('**/decide/*', (req) => + req.reply( + decideResponse({ + ...overrides, + }) + ) + ) +} diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts index fe164bf074b3a6..f05a549c9bf306 100644 --- a/cypress/support/e2e.ts +++ b/cypress/support/e2e.ts @@ -4,7 +4,7 @@ import 'cypress-axe' import { urls } from 'scenes/urls' -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from './decide' try { // eslint-disable-next-line @typescript-eslint/no-var-requires @@ -86,14 +86,7 @@ beforeEach(() => { Cypress.env('POSTHOG_PROPERTY_GITHUB_ACTION_RUN_URL', process.env.GITHUB_ACTION_RUN_URL) cy.useSubscriptionStatus('subscribed') - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - // Feature flag to be treated as rolled out in E2E tests, e.g.: - // 'toolbar-launch-side-action': true, - }) - ) - ) + setupFeatureFlags({}) // un-intercepted sometimes this doesn't work and the page gets stuck on the SpinnerOverlay cy.intercept(/app.posthog.com\/api\/projects\/@current\/feature_flags\/my_flags.*/, (req) => req.reply([])) diff --git a/ee/api/dashboard_collaborator.py b/ee/api/dashboard_collaborator.py index 1687bf5d831154..4cdd8833983cdf 100644 --- a/ee/api/dashboard_collaborator.py +++ b/ee/api/dashboard_collaborator.py @@ -91,7 +91,7 @@ class DashboardCollaboratorViewSet( scope_object = "dashboard" permission_classes = [CanEditDashboardCollaborator] pagination_class = None - queryset = DashboardPrivilege.objects.select_related("dashboard").filter(user__is_active=True) + queryset = DashboardPrivilege.objects.select_related("dashboard", "dashboard__team").filter(user__is_active=True) lookup_field = "user__uuid" serializer_class = DashboardCollaboratorSerializer filter_rewrite_rules = {"project_id": "dashboard__team__project_id"} diff --git a/ee/api/hooks.py b/ee/api/hooks.py index 6dd6dfd85e5c37..22d54c4b7bf8e1 100644 --- a/ee/api/hooks.py +++ b/ee/api/hooks.py @@ -23,6 +23,7 @@ def create_zapier_hog_function(hook: Hook, serializer_context: dict) -> HogFunct serializer = HogFunctionSerializer( data={ "template_id": template_zapier.id, + "type": "destination", "name": f"Zapier webhook for action {hook.resource_id}", "filters": {"actions": [{"id": str(hook.resource_id), "name": "", "type": "actions", "order": 0}]}, "inputs": { diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr index cfb948992d66d0..983cdf00b5aa0d 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr @@ -1,17 +1,5 @@ # serializer version: 1 # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, @@ -27,7 +15,7 @@ OFFSET 0 ''' # 
--- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -106,19 +94,86 @@ GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.2 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + 
steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones ''' /* user_id:0 request:_snapshot_ */ SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, @@ -134,6 +189,85 @@ OFFSET 0 ''' # --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = 
max(max_steps)) + GROUP BY prop + ''' +# --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.2 ''' /* user_id:0 request:_snapshot_ */ @@ -213,19 +347,181 @@ GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants + ''' + /* user_id:0 request:_snapshot_ */ + SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) 
as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop + ''' +# --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN 
['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop + ''' +# --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation ''' /* user_id:0 request:_snapshot_ */ SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, @@ -241,7 +537,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 ''' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -276,13 +572,13 @@ min(latest_1) over (PARTITION by aggregation_target, prop ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , - if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop FROM (SELECT *, if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop FROM (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$account_id'), ''), 'null'), '^"|"$', '') as aggregation_target, if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, if(event = '$pageview', 1, 0) as step_0, if(step_0 = 1, timestamp, null) as latest_0, @@ -320,34 +616,6 @@ GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 - ''' - /* user_id:0 request:_snapshot_ */ - SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, - count(*) as count - FROM events e - WHERE team_id = 99999 - AND event IN ['$pageleave', '$pageview'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.2 ''' /* user_id:0 request:_snapshot_ */ @@ -428,18 +696,6 @@ ''' # --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT 
replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -458,7 +714,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -506,7 +762,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -526,7 +782,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -590,19 +846,71 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = '$feature_flag_called' + AND (((isNull(replaceRegexpAll(JSONExtractRaw(e.properties, 'exclude'), '^"|"$', '')) + OR NOT JSONHas(e.properties, 'exclude'))) + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND 
toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -619,7 +927,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -665,7 +973,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -683,7 +991,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 ''' /* user_id:0 request:_snapshot_ */ SELECT [now()] AS date, @@ -692,19 +1000,16 @@ LIMIT 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT [now()] AS date, + [0] AS total, + '' AS breakdown_value + LIMIT 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.1 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -721,7 +1026,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -767,7 +1072,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -785,7 +1090,7 @@ OFFSET 0 
''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.4 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.3 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -847,19 +1152,69 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.4 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 07:00:00', 'US/Pacific')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 02:10:00', 'US/Pacific')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'US/Pacific')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = '$feature_flag_called' + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + AND toTimeZone(timestamp, 'US/Pacific') >= toDateTime('2020-01-01 02:10:00', 'US/Pacific') + AND toTimeZone(timestamp, 'US/Pacific') <= toDateTime('2020-01-06 07:00:00', 'US/Pacific') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.1 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -877,7 +1232,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.2 
+# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -924,7 +1279,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -942,6 +1297,68 @@ OFFSET 0 ''' # --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.3 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = '$feature_flag_called' + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.4 ''' /* user_id:0 request:_snapshot_ */ diff --git a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr index 7c4d474405fc4b..97bc6424e1cbd7 100644 --- a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr +++ b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr @@ -19,12 +19,12 @@ 
ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -51,24 +51,24 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 
0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -95,24 +95,24 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, 
%(hogql_val_15)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -139,12 +139,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), 
notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -171,12 +171,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -238,12 +238,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -313,12 +313,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), 
sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -388,12 +388,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 
'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -463,12 +463,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -538,27 +538,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT 
argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -628,27 +628,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), 
ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -718,27 +718,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, 
person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -808,27 +808,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) 
SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -898,12 +898,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -973,12 +973,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1048,12 +1048,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1123,12 +1123,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 
13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1163,10 +1163,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1201,10 +1201,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, 
'00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1239,10 +1239,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1277,10 
+1277,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1315,16 +1315,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT 
argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-          FROM person_distinct_id_overrides
-          WHERE equals(person_distinct_id_overrides.team_id, 99999)
-          GROUP BY person_distinct_id_overrides.distinct_id
-          HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-       WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+      (SELECT DISTINCT events.`$session_id` AS `$session_id`
+       FROM events
+       LEFT OUTER JOIN
+         (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+          FROM person_distinct_id_overrides
+          WHERE equals(person_distinct_id_overrides.team_id, 99999)
+          GROUP BY person_distinct_id_overrides.distinct_id
+          HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+       WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
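These variants also capture HogQL's person-identity resolution: events are left-joined to `person_distinct_id_overrides`, the row with the highest `version` per `distinct_id` wins via `argMax`, ids whose latest row is flagged `is_deleted` are dropped by the `HAVING` clause, and the event's own `person_id` is used only when no override exists. A hedged, pure-Python rendering of that logic (the sample rows are invented for illustration):

```python
# Pure-Python equivalent of the argMax/is_deleted override pattern above.
rows = [
    # (distinct_id, version, person_id, is_deleted)
    ("device-1", 1, "person-A", 0),
    ("device-1", 2, "person-B", 0),  # later merge wins, like argMax(..., version)
    ("device-2", 1, "person-C", 1),  # latest row deleted -> no override survives
]

# Keep the highest-version row per distinct_id, mirroring argMax.
latest: dict[str, tuple[int, str, int]] = {}
for distinct_id, version, person_id, is_deleted in rows:
    if distinct_id not in latest or version > latest[distinct_id][0]:
        latest[distinct_id] = (version, person_id, is_deleted)

# Drop deleted overrides, mirroring HAVING ... is_deleted = 0.
overrides = {d: p for d, (v, p, deleted) in latest.items() if not deleted}

def resolve_person(distinct_id: str, event_person_id: str) -> str:
    # Mirrors if(not(empty(events__override.distinct_id)),
    #            events__override.person_id, events.person_id)
    return overrides.get(distinct_id, event_person_id)

assert resolve_person("device-1", "person-A") == "person-B"
assert resolve_person("device-2", "person-X") == "person-X"
```

@@ -1359,16 +1359,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)),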
plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 
'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1403,16 +1403,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 
13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1447,16 +1447,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING 
ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1491,16 +1491,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1535,16 +1535,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 
'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1579,16 +1579,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE 
and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1623,16 +1623,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, 
person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC diff --git a/ee/surveys/summaries/summarize_surveys.py b/ee/surveys/summaries/summarize_surveys.py index 1e4b088484f559..1b74ca04d60c86 100644 --- a/ee/surveys/summaries/summarize_surveys.py +++ b/ee/surveys/summaries/summarize_surveys.py @@ -121,7 +121,7 @@ def summarize_survey_responses( we're trying to identify what to work on use as concise and simple language as is 
possible. generate no text other than the summary.
-                the aim is to let people see themes in the responses received. return the text in github flavoured markdown format""",
+                the aim is to let people see themes in the responses received. return the text in markdown format without using any paragraph formatting""",
             },
         ],
         user=f"{instance_region}/{user.pk}",
@@ -131,5 +131,7 @@ def summarize_survey_responses(
     if usage:
         TOKENS_IN_PROMPT_HISTOGRAM.observe(usage)
 
+    logger.info("survey_summary_response", result=result)
+
     content: str = result.choices[0].message.content or ""
     return {"content": content, "timings": timer.get_all_timings()}
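Beyond the snapshot churn, the `summarize_survey_responses` change itself is small: the system prompt now asks for plain markdown without paragraph formatting instead of GitHub-flavoured markdown, and the raw completion is logged before the content is extracted. The keyword-argument call matches structlog-style structured logging; a minimal self-contained sketch of that pattern (logger setup and the shape of `result` are assumptions beyond what the diff shows):

```python
# Hedged sketch of the logging-and-extraction tail added above.
import structlog

logger = structlog.get_logger(__name__)

def extract_summary(result) -> str:
    # Log the full provider response as a structured event, so a bad
    # summary can be inspected without re-running the paid completion.
    logger.info("survey_summary_response", result=result)
    # Guard against a null completion: fall back to an empty string
    # rather than raising when the model returns no message content.
    return result.choices[0].message.content or ""
```

The `or ""` fallback keeps the endpoint returning a string even when the model yields no content, and structured logging keys the event by name so it can be filtered independently of the free-form result payload.

diff --git a/frontend/__snapshots__/components-cards-text-card--template--dark.png b/frontend/__snapshots__/components-cards-text-card--template--dark.png index 1418925cf2961b..08005be3d11b8f 100644 Binary files a/frontend/__snapshots__/components-cards-text-card--template--dark.png and b/frontend/__snapshots__/components-cards-text-card--template--dark.png differ diff --git a/frontend/__snapshots__/components-cards-text-card--template--light.png b/frontend/__snapshots__/components-cards-text-card--template--light.png index c0961d67af400d..fdd4b64968096c 100644 Binary files a/frontend/__snapshots__/components-cards-text-card--template--light.png and b/frontend/__snapshots__/components-cards-text-card--template--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--dark.png index e9fe7de459ce9f..2ad22e1251aa4b 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--light.png index b31b13bf96316e..61c142b66d388d 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--default--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--default--dark.png index 557a24ad829de1..225413d72709da 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--default--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--default--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--default--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--default--light.png index 18eeafe53a294c..0dddcd3fa40409 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--default--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--default--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--dark.png index 557a24ad829de1..225413d72709da 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--dark.png differ diff --git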
a/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--light.png index 18eeafe53a294c..0dddcd3fa40409 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--dark.png index 9c3fb1e344f746..acec91d9d57705 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--light.png index e41e7dec7df53c..e35cf20e724cf6 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--dark.png index 2517ac22b8fc92..fac9168f5b9227 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--light.png index 4fce8a837f7a9e..9786d21e2280d3 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--dark.png index d9b70116e57b61..772ca47300a43a 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--light.png index 88ad89c98e8d9f..33839ce93e4d1f 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--dark.png index 0dac5dff8a31fd..876339c60c48f6 100644 Binary files 
a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--light.png index b6af1c8e969b66..09f1c1bbb07007 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--dark.png index 7ca69b7425a300..85b075a0f290d8 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--light.png index 4a9a95e7c36d75..4716324e22c094 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--dark.png index 657492c654a5de..f61d8a1aa92178 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--light.png index 7be9fca3cf6be1..4125a9641deab6 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--dark.png index ab975529f0e677..197dba2e766b20 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--light.png index a4a1c5246525be..e3f9727ae5b779 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--dark.png 
b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--dark.png index 0cc51fee095f6f..fe75b85b1853b2 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--light.png index 312e4ac544ef9b..53680276c8d44b 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--dark.png index 13921a926aa454..56ec6eb839a55a 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--light.png index bca3d330610fc7..c415408379d3b2 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--dark.png index 488140bfb1571f..ae176c67d2aa26 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--light.png index 5dea38e2b1c7fe..7e9e414273a44a 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--light.png differ diff --git a/frontend/__snapshots__/components-playerinspector--default--dark.png b/frontend/__snapshots__/components-playerinspector--default--dark.png index 51580b9660e6f1..87c483c27613aa 100644 Binary files a/frontend/__snapshots__/components-playerinspector--default--dark.png and b/frontend/__snapshots__/components-playerinspector--default--dark.png differ diff --git a/frontend/__snapshots__/components-playerinspector--default--light.png b/frontend/__snapshots__/components-playerinspector--default--light.png index 42395f0be8cb4e..04f141310bd43d 100644 Binary files a/frontend/__snapshots__/components-playerinspector--default--light.png and b/frontend/__snapshots__/components-playerinspector--default--light.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--dark.png 
[~60 binary snapshot image updates, index hashes omitted: PNGs under frontend/__snapshots__/ changed, in dark and light variants: components-properties-table (dollar-properties-on-person-hidden), lemon-ui-lemon-calendar (custom-styles, default, friday-first, hour, minute, multiple-months, saturday-first, sunday-first, thursday-first, wednesday-first), lemon-ui-lemon-calendar-select (default, hour, minute, past, upcoming, with-time-toggle, plus new with-time-toggle-and-multiple-months files), replay-player-failure (recent-recordings-404), replay-player-success (recent-recordings, second-recording-in-list), scenes-app-errortracking (group-page, list-page), scenes-app-feature-flags (new-feature-flag), scenes-app-insights (funnel-historical-trends-edit including webkit, funnel-top-to-bottom-breakdown-edit, funnel-top-to-bottom-edit), scenes-app-insights-error-empty-states (long-loading), scenes-app-notebooks (recordings-playlist), scenes-app-pipeline (pipeline-node-new-hog-function), scenes-other-billing (billing-with-credit-cta), scenes-other-onboarding (onboarding-billing), scenes-other-settings (settings-project, settings-project-with-replay-features, settings-session-timeout-all-options/-password-only/-sso-enforced-github/-google/-saml/-sso-only).]
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx index 81b31df6457063..244e42c52d936f 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx @@ -174,7 +174,7 @@ export const sidePanelActivityLogic = kea([ ], })), - listeners(({ values, actions }) => ({ + listeners(({ values, actions, cache }) => ({ setActiveTab: ({ tab }) => { if (tab === SidePanelActivityTab.All && !values.allActivityResponseLoading) { actions.loadAllActivity() } }, @@ -191,6 +191,13 @@ actions.setActiveTab(options as SidePanelActivityTab) } }, + togglePolling: ({ pageIsVisible }) => { + if (pageIsVisible) { + actions.loadImportantChanges() + } else { + clearTimeout(cache.pollTimeout) + } + }, })), selectors({ allActivity: [ diff --git a/frontend/src/layout/navigation/ProjectNotice.tsx b/frontend/src/layout/navigation/ProjectNotice.tsx index 5a0a9c4dd28ce3..8d4df0246a3a0b 100644 --- a/frontend/src/layout/navigation/ProjectNotice.tsx +++ b/frontend/src/layout/navigation/ProjectNotice.tsx @@ -1,4 +1,5 @@ import { IconGear, IconPlus } from '@posthog/icons' +import { Spinner } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { dayjs } from 'lib/dayjs' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' @@ -22,17 +23,29 @@ interface
ProjectNoticeBlueprint { closeable?: boolean } -function CountDown({ datetime }: { datetime: dayjs.Dayjs }): JSX.Element { +function CountDown({ datetime, callback }: { datetime: dayjs.Dayjs; callback?: () => void }): JSX.Element { const [now, setNow] = useState(dayjs()) + // Format the time difference as 00:00:00 + const duration = dayjs.duration(datetime.diff(now)) + const pastCountdown = duration.seconds() < 0 + + const countdown = pastCountdown + ? 'Expired' + : duration.hours() > 0 + ? duration.format('HH:mm:ss') + : duration.format('mm:ss') + useEffect(() => { const interval = setInterval(() => setNow(dayjs()), 1000) return () => clearInterval(interval) }, []) - // Format the time difference as 00:00:00 - const duration = dayjs.duration(datetime.diff(now)) - const countdown = duration.hours() > 0 ? duration.format('HH:mm:ss') : duration.format('mm:ss') + useEffect(() => { + if (pastCountdown) { + callback?.() + } + }, [pastCountdown]) return <>{countdown} } @@ -40,8 +53,8 @@ function CountDown({ datetime }: { datetime: dayjs.Dayjs }): JSX.Element { export function ProjectNotice(): JSX.Element | null { const { projectNoticeVariant } = useValues(navigationLogic) const { currentOrganization } = useValues(organizationLogic) - const { logout } = useActions(userLogic) - const { user } = useValues(userLogic) + const { logout, loadUser } = useActions(userLogic) + const { user, userLoading } = useValues(userLogic) const { closeProjectNotice } = useActions(navigationLogic) const { showInviteModal } = useActions(inviteLogic) const { requestVerificationLink } = useActions(verifyEmailLogic) @@ -124,7 +137,14 @@ export function ProjectNotice(): JSX.Element | null { You are currently logged in as a customer.{' '} {user?.is_impersonated_until && ( <> - Expires in + Expires in + {userLoading ? 
( + + ) : ( + loadUser()}> + Refresh + + )} )} diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 81587232b3b077..37d394a7fa483e 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -54,6 +54,7 @@ import { ExternalDataSourceSyncSchema, ExternalDataSourceType, FeatureFlagAssociatedRoleType, + FeatureFlagStatusResponse, FeatureFlagType, Group, GroupListParams, @@ -424,8 +425,8 @@ class ApiRequest { return this.events(teamId).addPathComponent(id) } - public tags(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('tags') + public tags(projectId?: ProjectType['id']): ApiRequest { + return this.projectsDetail(projectId).addPathComponent('tags') } // # Data management @@ -520,8 +521,11 @@ class ApiRequest { return this.dashboards(teamId).addPathComponent(dashboardId) } - public dashboardCollaborators(dashboardId: DashboardType['id'], teamId?: TeamType['id']): ApiRequest { - return this.dashboardsDetail(dashboardId, teamId).addPathComponent('collaborators') + public dashboardCollaborators( + dashboardId: DashboardType['id'], + projectId: ProjectType['id'] = ApiConfig.getCurrentProjectId() // Collaborators endpoint is project-level, not team-level + ): ApiRequest { + return this.dashboardsDetail(dashboardId, projectId).addPathComponent('collaborators') } public dashboardSharing(dashboardId: DashboardType['id'], teamId?: TeamType['id']): ApiRequest { @@ -531,9 +535,9 @@ class ApiRequest { public dashboardCollaboratorsDetail( dashboardId: DashboardType['id'], userUuid: UserType['uuid'], - teamId?: TeamType['id'] + projectId?: ProjectType['id'] ): ApiRequest { - return this.dashboardCollaborators(dashboardId, teamId).addPathComponent(userUuid) + return this.dashboardCollaborators(dashboardId, projectId).addPathComponent(userUuid) } // # Dashboard templates @@ -663,6 +667,13 @@ class ApiRequest { ) } + public featureFlagStatus(teamId: TeamType['id'], featureFlagId: FeatureFlagType['id']): ApiRequest { + return this.projectsDetail(teamId) + .addPathComponent('feature_flags') + .addPathComponent(String(featureFlagId)) + .addPathComponent('status') + } + public featureFlagCreateScheduledChange(teamId: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('scheduled_changes') } @@ -1042,6 +1053,12 @@ const api = { ): Promise<{ scheduled_change: ScheduledChangeType }> { return await new ApiRequest().featureFlagDeleteScheduledChange(teamId, scheduledChangeId).delete() }, + async getStatus( + teamId: TeamType['id'], + featureFlagId: FeatureFlagType['id'] + ): Promise { + return await new ApiRequest().featureFlagStatus(teamId, featureFlagId).get() + }, }, organizationFeatureFlags: { @@ -1283,8 +1300,8 @@ const api = { }, tags: { - async list(teamId: TeamType['id'] = ApiConfig.getCurrentTeamId()): Promise { - return new ApiRequest().tags(teamId).get() + async list(projectId: TeamType['id'] = ApiConfig.getCurrentProjectId()): Promise { + return new ApiRequest().tags(projectId).get() }, }, diff --git a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx index 34c889fd00cee4..698530f338d4a7 100644 --- a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx +++ b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx @@ -34,8 +34,8 @@ interface TextCardBodyProps extends Pick, ' export function TextContent({ text, closeDetails, className }: TextCardBodyProps): JSX.Element { return ( -
closeDetails?.()}> - {text} +
closeDetails?.()}> + {text}
) } @@ -143,7 +143,7 @@ export function TextCardInternal( )}
- +
{showResizeHandles && ( diff --git a/frontend/src/lib/components/IntervalFilter/IntervalFilter.tsx b/frontend/src/lib/components/IntervalFilter/IntervalFilter.tsx index 522ba901a977ea..d0764df2ad599a 100644 --- a/frontend/src/lib/components/IntervalFilter/IntervalFilter.tsx +++ b/frontend/src/lib/components/IntervalFilter/IntervalFilter.tsx @@ -1,4 +1,5 @@ -import { LemonSelect, LemonSelectOption } from '@posthog/lemon-ui' +import { IconPin } from '@posthog/icons' +import { LemonButton, LemonSelect, LemonSelectOption } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { insightLogic } from 'scenes/insights/insightLogic' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' @@ -12,27 +13,43 @@ interface IntervalFilterProps { export function IntervalFilter({ disabled }: IntervalFilterProps): JSX.Element { const { insightProps } = useValues(insightLogic) - const { interval, enabledIntervals } = useValues(insightVizDataLogic(insightProps)) - const { updateQuerySource } = useActions(insightVizDataLogic(insightProps)) + const { interval, enabledIntervals, isIntervalManuallySet } = useValues(insightVizDataLogic(insightProps)) + const { updateQuerySource, setIsIntervalManuallySet } = useActions(insightVizDataLogic(insightProps)) return ( <> grouped by - { - updateQuerySource({ interval: value } as Partial) - }} - options={Object.entries(enabledIntervals).map(([value, { label, disabledReason, hidden }]) => ({ - value: value as IntervalType, - label, - hidden, - disabledReason, - }))} - /> + {isIntervalManuallySet ? ( + { + setIsIntervalManuallySet(false) + }} + tooltip="Unpin interval" + className="flex-1" + center + size="small" + icon={} + > + {interval || 'day'} + + ) : ( + { + updateQuerySource({ interval: value } as Partial) + }} + options={Object.entries(enabledIntervals).map(([value, { label, disabledReason, hidden }]) => ({ + value: value as IntervalType, + label, + hidden, + disabledReason, + }))} + /> + )} ) } diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx index 756844702c3e59..54389b51c02715 100644 --- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx @@ -166,7 +166,7 @@ function ValueDisplay({ } > : undefined} > @@ -222,8 +222,8 @@ export function PropertiesTable({ parent, }: PropertiesTableType): JSX.Element { const [searchTerm, setSearchTerm] = useState('') - const { hidePostHogPropertiesInTable } = useValues(userPreferencesLogic) - const { setHidePostHogPropertiesInTable } = useActions(userPreferencesLogic) + const { hidePostHogPropertiesInTable, hideNullValues } = useValues(userPreferencesLogic) + const { setHidePostHogPropertiesInTable, setHideNullValues } = useActions(userPreferencesLogic) const { isCloudOrDev } = useValues(preflightLogic) const objectProperties = useMemo(() => { @@ -283,11 +283,18 @@ export function PropertiesTable({ }) } - if (filterable && hidePostHogPropertiesInTable) { - entries = entries.filter(([key]) => { - const isPostHogProperty = key.startsWith('$') || PROPERTY_KEYS.includes(key) - const isNonDollarPostHogProperty = isCloudOrDev && CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS.includes(key) - return !isPostHogProperty && !isNonDollarPostHogProperty + if (filterable) { + entries = entries.filter(([key, value]) => { + if (hideNullValues && value === null) { + return false + } + if (hidePostHogPropertiesInTable) { + const isPostHogProperty = 
key.startsWith('$') || PROPERTY_KEYS.includes(key) + const isNonDollarPostHogProperty = + isCloudOrDev && CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS.includes(key) + return !isPostHogProperty && !isNonDollarPostHogProperty + } + return true }) } @@ -299,7 +306,7 @@ export function PropertiesTable({ }) } return entries - }, [properties, sortProperties, searchTerm, hidePostHogPropertiesInTable]) + }, [properties, sortProperties, searchTerm, hidePostHogPropertiesInTable, hideNullValues]) if (Array.isArray(properties)) { return ( @@ -424,7 +431,7 @@ export function PropertiesTable({ return ( <> {(searchable || filterable) && ( -
+
{searchable && ( )} {filterable && ( - + <> + + + + )} @@ -467,6 +483,7 @@ export function PropertiesTable({ onClick={() => { setSearchTerm('') setHidePostHogPropertiesInTable(false) + setHideNullValues(false) }} > Clear filters diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts index a9389640cbeb65..3e0f6700745987 100644 --- a/frontend/src/lib/components/Support/supportLogic.ts +++ b/frontend/src/lib/components/Support/supportLogic.ts @@ -46,6 +46,29 @@ function getSessionReplayLink(): string { return `\nSession: ${replayUrl}` } +function getErrorTrackingLink(): string { + const filterGroup = encodeURIComponent( + JSON.stringify({ + type: 'AND', + values: [ + { + type: 'AND', + values: [ + { + key: '$session_id', + value: [posthog.get_session_id()], + operator: 'exact', + type: 'event', + }, + ], + }, + ], + }) + ) + + return `\nExceptions: https://us.posthog.com/project/2/error_tracking?filterGroup=${filterGroup}` +} + function getDjangoAdminLink( user: UserType | null, cloudRegion: Region | null | undefined, @@ -178,6 +201,11 @@ export const TARGET_AREA_TO_NAME = [ 'data-attr': `support-form-target-area-web_analytics`, label: 'Web Analytics', }, + { + value: 'error_tracking', + 'data-attr': `support-form-target-area-error_tracking`, + label: 'Error tracking', + }, ], }, ] @@ -211,6 +239,7 @@ export type SupportTicketTargetArea = | 'toolbar' | 'surveys' | 'web_analytics' + | 'error_tracking' export type SupportTicketSeverityLevel = keyof typeof SEVERITY_LEVEL_TO_NAME export type SupportTicketKind = keyof typeof SUPPORT_KIND_TO_SUBJECT @@ -446,6 +475,7 @@ export const supportLogic = kea([ `\nTarget area: ${target_area}` + `\nReport event: http://go/ticketByUUID/${zendesk_ticket_uuid}` + getSessionReplayLink() + + getErrorTrackingLink() + getCurrentLocationLink() + getDjangoAdminLink( userLogic.values.user, diff --git a/frontend/src/lib/components/ViewRecordingButton.tsx b/frontend/src/lib/components/ViewRecordingButton.tsx index 1d0c7adb1d4b0c..37de0ba0eaae05 100644 --- a/frontend/src/lib/components/ViewRecordingButton.tsx +++ b/frontend/src/lib/components/ViewRecordingButton.tsx @@ -10,20 +10,27 @@ import { EventType } from '~/types' export default function ViewRecordingButton({ sessionId, timestamp, + inModal = false, ...props }: Pick & { sessionId: string timestamp?: string | Dayjs + // whether to open in a modal or navigate to the replay page + inModal?: boolean }): JSX.Element { const { openSessionPlayer } = useActions(sessionPlayerModalLogic) return ( { - const fiveSecondsBeforeEvent = dayjs(timestamp).valueOf() - 5000 - openSessionPlayer({ id: sessionId }, Math.max(fiveSecondsBeforeEvent, 0)) - }} + to={inModal ? undefined : urls.replaySingle(sessionId)} + onClick={ + inModal + ? () => { + const fiveSecondsBeforeEvent = timestamp ? 
dayjs(timestamp).valueOf() - 5000 : 0 + openSessionPlayer({ id: sessionId }, Math.max(fiveSecondsBeforeEvent, 0)) + } + : undefined + } sideIcon={} {...props} > diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 6becb7998a814f..4f5dab68b99420 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -165,6 +165,7 @@ export const FEATURE_FLAGS = { PERSON_FEED_CANVAS: 'person-feed-canvas', // owner: #project-canvas FEATURE_FLAG_COHORT_CREATION: 'feature-flag-cohort-creation', // owner: @neilkakkar #team-feature-success INSIGHT_HORIZONTAL_CONTROLS: 'insight-horizontal-controls', // owner: @benjackwhite + SURVEYS_ADAPTIVE_LIMITS: 'surveys-adaptive-limits', // owner: #team-feature-success SURVEYS_WIDGETS: 'surveys-widgets', // owner: #team-feature-success SURVEYS_EVENTS: 'surveys-events', // owner: #team-feature-success SURVEYS_ACTIONS: 'surveys-actions', // owner: #team-feature-success @@ -221,7 +222,6 @@ export const FEATURE_FLAGS = { EXPERIMENTS_HOGQL: 'experiments-hogql', // owner: @jurajmajerik #team-experiments ROLE_BASED_ACCESS_CONTROL: 'role-based-access-control', // owner: @zach MESSAGING: 'messaging', // owner @mariusandra #team-cdp - SESSION_REPLAY_URL_BLOCKLIST: 'session-replay-url-blocklist', // owner: @richard-better #team-replay BILLING_TRIAL_FLOW: 'billing-trial-flow', // owner: @zach EDIT_DWH_SOURCE_CONFIG: 'edit_dwh_source_config', // owner: @Gilbert09 #team-data-warehouse AI_SURVEY_RESPONSE_SUMMARY: 'ai-survey-response-summary', // owner: @pauldambra @@ -235,10 +235,13 @@ export const FEATURE_FLAGS = { REMOTE_CONFIG: 'remote-config', // owner: @benjackwhite SITE_DESTINATIONS: 'site-destinations', // owner: @mariusandra #team-cdp SITE_APP_FUNCTIONS: 'site-app-functions', // owner: @mariusandra #team-cdp + HOG_TRANSFORMATIONS: 'hog-transformations', // owner: #team-cdp REPLAY_HOGQL_FILTERS: 'replay-hogql-filters', // owner: @pauldambra #team-replay REPLAY_LIST_RECORDINGS_AS_QUERY: 'replay-list-recordings-as-query', // owner: @pauldambra #team-replay BILLING_SKIP_FORECASTING: 'billing-skip-forecasting', // owner: @zach + EXPERIMENT_STATS_V2: 'experiment-stats-v2', // owner: @danielbachhuber #team-experiments WEB_ANALYTICS_PERIOD_COMPARISON: 'web-analytics-period-comparison', // owner: @rafaeelaudibert #team-web-analytics + WEB_ANALYTICS_CONVERSION_GOAL_FILTERS: 'web-analytics-conversion-goal-filters', // owner: @rafaeelaudibert #team-web-analytics } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/integrations/IntegrationScopesWarning.tsx b/frontend/src/lib/integrations/IntegrationScopesWarning.tsx new file mode 100644 index 00000000000000..c9e6c7a61d7644 --- /dev/null +++ b/frontend/src/lib/integrations/IntegrationScopesWarning.tsx @@ -0,0 +1,65 @@ +import api from 'lib/api' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { Link } from 'lib/lemon-ui/Link' +import { useMemo } from 'react' + +import { HogFunctionInputSchemaType, IntegrationType } from '~/types' + +export function IntegrationScopesWarning({ + integration, + schema, +}: { + integration: IntegrationType + schema?: HogFunctionInputSchemaType +}): JSX.Element { + const getScopes = useMemo((): string[] => { + const scopes: any[] = [] + const possibleScopeLocation = [integration.config.scope, integration.config.scopes] + + possibleScopeLocation.map((scope) => { + if (typeof scope === 'string') { + scopes.push(scope.split(' ')) + scopes.push(scope.split(',')) + } + if (typeof 
scope === 'object') { + scopes.push(scope) + } + }) + return scopes + .filter((scope: any) => typeof scope === 'object') + .reduce((a, b) => (a.length > b.length ? a : b), []) + }, [integration.config]) + + const requiredScopes = schema?.requiredScopes?.split(' ') || [] + const missingScopes = requiredScopes.filter((scope: string) => !getScopes.includes(scope)) + + if (missingScopes.length === 0 || getScopes.length === 0) { + return <> + } + return ( +
+ + Required scopes are missing: [{missingScopes.join(', ')}]. + {integration.kind === 'hubspot' ? ( + + Note that some features may not be available on your current HubSpot plan. Check out{' '} + + this page + {' '} + for more details. + + ) : null} + +
+ ) +} diff --git a/frontend/src/lib/integrations/IntegrationView.tsx b/frontend/src/lib/integrations/IntegrationView.tsx index 31cd12e82eb404..80590299bda4d7 100644 --- a/frontend/src/lib/integrations/IntegrationView.tsx +++ b/frontend/src/lib/integrations/IntegrationView.tsx @@ -1,15 +1,18 @@ import { LemonBanner } from '@posthog/lemon-ui' import api from 'lib/api' import { UserActivityIndicator } from 'lib/components/UserActivityIndicator/UserActivityIndicator' +import { IntegrationScopesWarning } from 'lib/integrations/IntegrationScopesWarning' -import { IntegrationType } from '~/types' +import { HogFunctionInputSchemaType, IntegrationType } from '~/types' export function IntegrationView({ integration, suffix, + schema, }: { integration: IntegrationType suffix?: JSX.Element + schema?: HogFunctionInputSchemaType }): JSX.Element { const errors = (integration.errors && integration.errors?.split(',')) || [] @@ -36,7 +39,7 @@ export function IntegrationView({ {suffix}
- {errors.length > 0 && ( + {errors.length > 0 ? (
+ ) : ( + )}
) diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss index b97f0e30bcc3fe..cf41641c4c8eac 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss +++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss @@ -1,4 +1,5 @@ .LemonCalendar { + --lemon-calendar-month-height: 305px; --lemon-calendar-row-gap: 2px; --lemon-calendar-day-width: 40px; --lemon-calendar-today-radius: 2px; @@ -7,6 +8,9 @@ // Tricky: needs to match the equivalent height button from LemonButton.scss --lemon-calendar-time-button-height: 2.3125rem; + // Force height for month so when swtiching to longer months, the height doesn't change + height: var(--lemon-calendar-month-height); + .LemonCalendar__month { width: 100%; } @@ -23,6 +27,10 @@ } .LemonCalendar__month tr { + &.LemonCalendar__month-header { + height: var(--lemon-calendar-time-button-height); + } + .LemonButton { &.rounded-none { border-radius: 0; diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx index f955b57a5de06b..632cc6c2af2293 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx +++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx @@ -81,7 +81,7 @@ export const LemonCalendar = forwardRef(function LemonCalendar( return ( - +
{showLeftMonth && ( ([ path(['lib', 'logic', 'userPreferencesLogic']), actions({ setHidePostHogPropertiesInTable: (enabled: boolean) => ({ enabled }), + setHideNullValues: (enabled: boolean) => ({ enabled }), }), reducers(() => ({ hidePostHogPropertiesInTable: [ @@ -16,5 +17,6 @@ export const userPreferencesLogic = kea([ setHidePostHogPropertiesInTable: (_, { enabled }) => enabled, }, ], + hideNullValues: [true, { persist: true }, { setHideNullValues: (_, { enabled }) => enabled }], })), ]) diff --git a/frontend/src/loadPostHogJS.tsx b/frontend/src/loadPostHogJS.tsx index badabf1105246e..4dfc4e30ee47ff 100644 --- a/frontend/src/loadPostHogJS.tsx +++ b/frontend/src/loadPostHogJS.tsx @@ -67,6 +67,7 @@ export function loadPostHogJS(): void { capture_copied_text: true, }, person_profiles: 'always', + __preview_remote_config: true, // Helper to capture events for assertions in Cypress _onCapture: (window as any)._cypress_posthog_captures diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index ba1b6d94786802..f1bbaa4516c55c 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -191,6 +191,10 @@ export const dataNodeLogic = kea([ if (cache.localResults[stringifiedQuery] && !refresh) { return cache.localResults[stringifiedQuery] } + + if (!query.query) { + return null + } } if (!values.currentTeamId) { @@ -337,6 +341,12 @@ export const dataNodeLogic = kea([ ], })), reducers(({ props }) => ({ + isRefresh: [ + false, + { + loadData: (_, { refresh }) => !!refresh, + }, + ], dataLoading: [ false, { @@ -474,8 +484,12 @@ export const dataNodeLogic = kea([ (variablesOverride) => !!variablesOverride, ], isShowingCachedResults: [ - () => [(_, props) => props.cachedResults ?? null, (_, props) => props.query], - (cachedResults: AnyResponseType | null, query: DataNode): boolean => { + (s) => [(_, props) => props.cachedResults ?? null, (_, props) => props.query, s.isRefresh], + (cachedResults: AnyResponseType | null, query: DataNode, isRefresh): boolean => { + if (isRefresh) { + return false + } + return ( !!cachedResults || (cache.localResults && 'query' in query && JSON.stringify(query.query) in cache.localResults) @@ -630,23 +644,26 @@ export const dataNodeLogic = kea([ (s) => [s.nextAllowedRefresh, s.lastRefresh], (nextAllowedRefresh: string | null, lastRefresh: string | null) => (): string => { const now = dayjs() - let disabledReason = '' - if (!!nextAllowedRefresh && now.isBefore(dayjs(nextAllowedRefresh))) { - // If this is a saved insight, the result will contain nextAllowedRefresh, and we use that to disable the button - disabledReason = `You can refresh this insight again ${dayjs(nextAllowedRefresh).from(now)}` - } else if ( - !!lastRefresh && - now.subtract(UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES - 0.5, 'minutes').isBefore(lastRefresh) - ) { - // Unsaved insights don't get cached and get refreshed on every page load, but we avoid allowing users to click - // 'refresh' more than once every UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES. 
This can be bypassed by simply - refreshing the page though, as there's no cache layer on the backend - disabledReason = `You can refresh this insight again ${dayjs(lastRefresh) - .add(UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES, 'minutes') - .from(now)}` + // Saved insights have a nextAllowedRefresh we use to check if the user can refresh again + if (nextAllowedRefresh) { + const nextRefreshTime = dayjs(nextAllowedRefresh) + if (now.isBefore(nextRefreshTime)) { + return `You can refresh this insight again ${nextRefreshTime.from(now)}` + } } - - return disabledReason + // For unsaved insights we check the last refresh time + if (lastRefresh) { + const earliestRefresh = dayjs(lastRefresh).add( + UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES, + 'minutes' + ) + if (now.isBefore(earliestRefresh)) { + return `You can refresh this insight again ${earliestRefresh.from(now)}` + } + } + // If we don't have a nextAllowedRefresh or lastRefresh, we can refresh, so we + // return an empty string + return '' }, ], timings: [ diff --git a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx index 5a2830a135ef28..5ac043b46654fd 100644 --- a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx +++ b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx @@ -55,6 +55,7 @@ export function EventRowActions({ event }: EventActionProps): JSX.Element { )} props.setQuery?.({ ...props.query, source }), - [props.setQuery] + [props.setQuery, props.query] ) let component: JSX.Element | null = null diff --git a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts index fa9037cdcada92..4480fe99777554 100644 --- a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts @@ -1,7 +1,7 @@ -import { actions, afterMount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea' +import { actions, afterMount, connect, kea, key, listeners, path, props, propsChanged, reducers, selectors } from 'kea' import { subscriptions } from 'kea-subscriptions' import { dayjs } from 'lib/dayjs' -import { lightenDarkenColor, RGBToHex, uuid } from 'lib/utils' +import { lightenDarkenColor, objectsEqual, RGBToHex, uuid } from 'lib/utils' import mergeObject from 'lodash.merge' import { teamLogic } from 'scenes/teamLogic' @@ -242,6 +242,11 @@ export const dataVisualizationLogic = kea([ ['loadData'], ], })), + propsChanged(({ actions, props }, oldProps) => { + if (props.query && !objectsEqual(props.query, oldProps.query)) { + actions._setQuery(props.query) + } + }), props({ query: { source: {} } } as DataVisualizationLogicProps), actions(({ values }) => ({ setVisualizationType: (visualizationType: ChartDisplayType) => ({ visualizationType }), @@ -280,12 +285,14 @@ export const dataVisualizationLogic = kea([ colorMode: values.isDarkModeOn ?
'dark' : 'light', }), setConditionalFormattingRulesPanelActiveKeys: (keys: string[]) => ({ keys }), + _setQuery: (node: DataVisualizationNode) => ({ node }), })), reducers(({ props }) => ({ query: [ props.query, { setQuery: (_, { node }) => node, + _setQuery: (_, { node }) => node, }, ], visualizationType: [ diff --git a/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx b/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx index d8364d21c2508d..d532805db83a51 100644 --- a/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx +++ b/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx @@ -1,7 +1,9 @@ +import { useValues } from 'kea' import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { useState } from 'react' +import { groupsModel } from '~/models/groupsModel' import { EventsNode, EventsQuery, HogQLQuery, SessionAttributionExplorerQuery } from '~/queries/schema' import { isHogQLQuery, isSessionAttributionExplorerQuery } from '~/queries/utils' import { AnyPropertyFilter } from '~/types' @@ -21,6 +23,7 @@ export function EventPropertyFilters< isHogQLQuery(query) || isSessionAttributionExplorerQuery(query) ? query.filters?.properties : query.properties const eventNames = isHogQLQuery(query) || isSessionAttributionExplorerQuery(query) ? [] : query.event ? [query.event] : [] + const { groupsTaxonomicTypes } = useValues(groupsModel) return !properties || Array.isArray(properties) ? ( - loadData(true)}> - Refresh - + + loadData(true)} + className={disabledReason ? 'opacity-50' : ''} + disabledReason={canBypassRefreshDisabled ? '' : disabledReason} + > + Refresh + + )} diff --git a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx index 58bcbf6154cde6..90f036c734bc4c 100644 --- a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx +++ b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx @@ -77,7 +77,7 @@ export function EditorFilters({ query, showing, embedded }: EditorFiltersProps): isStepsFunnel || isTrendsFunnel const hasPathsAdvanced = hasAvailableFeature(AvailableFeature.PATHS_ADVANCED) - const hasAttribution = isStepsFunnel + const hasAttribution = isStepsFunnel || isTrendsFunnel const hasPathsHogQL = isPaths && pathsFilter?.includeEventTypes?.includes(PathType.HogQL) const editorFilters: InsightEditorFilterGroup[] = [ diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index b4d28dbbf37767..c2d5aadc147bb0 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -1470,7 +1470,15 @@ "type": "string" }, "BaseMathType": { - "enum": ["total", "dau", "weekly_active", "monthly_active", "unique_session", "first_time_for_user"], + "enum": [ + "total", + "dau", + "weekly_active", + "monthly_active", + "unique_session", + "first_time_for_user", + "first_matching_event_for_user" + ], "type": "string" }, "BinCountValue": { @@ -2246,6 +2254,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "timezone": { "type": "string" }, @@ -4585,6 +4596,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -6090,6 +6104,9 @@ }, "response": { "$ref": "#/definitions/ExperimentTrendsQueryResponse" + }, + "stats_version": { + "type": "integer" } }, "required": ["count_query", 
"kind"], @@ -6140,6 +6157,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -9855,6 +9875,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -10483,6 +10506,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -12666,6 +12692,17 @@ "$ref": "#/definitions/CompareFilter", "description": "Compare to date range" }, + "conversionGoal": { + "anyOf": [ + { + "$ref": "#/definitions/WebAnalyticsConversionGoal" + }, + { + "type": "null" + } + ], + "description": "Whether we should be comparing against a specific conversion goal" + }, "dateRange": { "$ref": "#/definitions/InsightDateRange", "description": "Date range for the query" diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index cddaec321af3c0..9f8f101449564f 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -910,6 +910,8 @@ export interface TrendsQuery extends InsightsQueryBase { breakdownFilter?: BreakdownFilter /** Compare to date range */ compareFilter?: CompareFilter + /** Whether we should be comparing against a specific conversion goal */ + conversionGoal?: WebAnalyticsConversionGoal | null } export type AssistantArrayPropertyFilterOperator = PropertyOperator.Exact | PropertyOperator.IsNot @@ -2005,6 +2007,7 @@ export interface ExperimentTrendsQueryResponse { probability: Record significant: boolean significance_code: ExperimentSignificanceCode + stats_version?: integer p_value: number credible_intervals: Record } @@ -2040,6 +2043,7 @@ export interface ExperimentTrendsQuery extends DataNode([ { validateEmailToken: async ({ uuid, token }: { uuid: string; token: string }, breakpoint) => { try { - await api.create(`api/users/${uuid}/verify_email/`, { token, uuid }) + await api.create(`api/users/verify_email/`, { token, uuid }) actions.setView('success') await breakpoint(2000) window.location.href = '/' @@ -48,7 +48,7 @@ export const verifyEmailLogic = kea([ { requestVerificationLink: async ({ uuid }: { uuid: string }) => { try { - await api.create(`api/users/${uuid}/request_email_verification/`, { uuid }) + await api.create(`api/users/request_email_verification/`, { uuid }) lemonToast.success( 'A new verification link has been sent to the associated email address. Please check your inbox.' 
) diff --git a/frontend/src/scenes/authentication/twoFactorLogic.ts b/frontend/src/scenes/authentication/twoFactorLogic.ts index 9f5c1e5eecb5a1..43d31a7f4d189f 100644 --- a/frontend/src/scenes/authentication/twoFactorLogic.ts +++ b/frontend/src/scenes/authentication/twoFactorLogic.ts @@ -37,6 +37,7 @@ export const twoFactorLogic = kea([ toggleTwoFactorSetupModal: (open: boolean) => ({ open }), toggleDisable2FAModal: (open: boolean) => ({ open }), toggleBackupCodesModal: (open: boolean) => ({ open }), + startSetup: true, }), reducers({ isTwoFactorSetupModalOpen: [ @@ -158,9 +159,13 @@ export const twoFactorLogic = kea([ actions.resetToken() } }, + startSetup: async () => { + await api.get('api/users/@me/two_factor_start_setup/') + }, })), afterMount(({ actions }) => { + actions.startSetup() actions.loadStatus() }), ]) diff --git a/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx b/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx index 03757f853741fa..e694c94ea1ad08 100644 --- a/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx +++ b/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx @@ -1,36 +1,46 @@ +import { NodeKind } from '~/queries/schema' + import { applyTemplate } from './newDashboardLogic' describe('template function in newDashboardLogic', () => { it('ignores unused variables', () => { expect( - applyTemplate({ a: 'hello', b: 'hi' }, [ - { - id: 'VARIABLE_1', - name: 'a', - default: { - event: '$pageview', + applyTemplate( + { a: 'hello', b: 'hi' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + event: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', }, - description: 'The description of the variable', - required: true, - type: 'event', - }, - ]) + ], + null + ) ).toEqual({ a: 'hello', b: 'hi' }) }) it('uses identified variables', () => { expect( - applyTemplate({ a: '{VARIABLE_1}', b: 'hi' }, [ - { - id: 'VARIABLE_1', - name: 'a', - default: { - event: '$pageview', + applyTemplate( + { a: '{VARIABLE_1}', b: 'hi' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + event: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', }, - description: 'The description of the variable', - required: true, - type: 'event', - }, - ]) + ], + null + ) ).toEqual({ a: { event: '$pageview', @@ -38,4 +48,85 @@ describe('template function in newDashboardLogic', () => { b: 'hi', }) }) + + it('replaces variables in query based tiles', () => { + expect( + applyTemplate( + { a: '{VARIABLE_1}' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + id: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', + }, + ], + NodeKind.TrendsQuery + ) + ).toEqual({ + a: { + event: '$pageview', + kind: 'EventsNode', + math: 'total', + }, + }) + }) + + it("removes the math property from query based tiles that don't support it", () => { + expect( + applyTemplate( + { a: '{VARIABLE_1}' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + id: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', + }, + ], + NodeKind.LifecycleQuery + ) + ).toEqual({ + a: { + event: '$pageview', + kind: 'EventsNode', + }, + }) + }) + + it('removes the math property from retention insight tiles', () => { + expect( + applyTemplate( + { a: '{VARIABLE_1}' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + id: '$pageview', + math: 'dau' as any, + type: 'events' as any, + }, + description: 
'The description of the variable', + required: true, + type: 'event', + }, + ], + NodeKind.RetentionQuery + ) + ).toEqual({ + a: { + id: '$pageview', + type: 'events', + }, + }) + }) }) diff --git a/frontend/src/scenes/dashboard/newDashboardLogic.ts b/frontend/src/scenes/dashboard/newDashboardLogic.ts index 67490678722580..564a24f736c1fe 100644 --- a/frontend/src/scenes/dashboard/newDashboardLogic.ts +++ b/frontend/src/scenes/dashboard/newDashboardLogic.ts @@ -5,11 +5,15 @@ import api from 'lib/api' import { DashboardRestrictionLevel } from 'lib/constants' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { dashboardsModel } from '~/models/dashboardsModel' +import { legacyEntityToNode, sanitizeRetentionEntity } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' import { getQueryBasedDashboard } from '~/queries/nodes/InsightViz/utils' +import { NodeKind } from '~/queries/schema' +import { isInsightVizNode } from '~/queries/utils' import { DashboardTemplateType, DashboardTemplateVariableType, DashboardTile, DashboardType, JsonType } from '~/types' import type { newDashboardLogicType } from './newDashboardLogicType' @@ -35,24 +39,47 @@ export interface NewDashboardLogicProps { } // Currently this is a very generic recursive function incase we want to add template variables to aspects beyond events -export function applyTemplate(obj: DashboardTile | JsonType, variables: DashboardTemplateVariableType[]): JsonType { +export function applyTemplate( + obj: DashboardTile | JsonType, + variables: DashboardTemplateVariableType[], + queryKind: NodeKind | null +): JsonType { if (typeof obj === 'string') { if (obj.startsWith('{') && obj.endsWith('}')) { const variableId = obj.substring(1, obj.length - 1) const variable = variables.find((variable) => variable.id === variableId) if (variable && variable.default) { + // added for future compatibility - at the moment we only have event variables + const isEventVariable = variable.type === 'event' + + if (queryKind && isEventVariable) { + let mathAvailability = MathAvailability.None + if (queryKind === NodeKind.TrendsQuery) { + mathAvailability = MathAvailability.All + } else if (queryKind === NodeKind.StickinessQuery) { + mathAvailability = MathAvailability.ActorsOnly + } else if (queryKind === NodeKind.FunnelsQuery) { + mathAvailability = MathAvailability.FunnelsOnly + } + return ( + queryKind === NodeKind.RetentionQuery + ? 
sanitizeRetentionEntity(variable.default as any) + : legacyEntityToNode(variable.default as any, true, mathAvailability) + ) as JsonType + } + return variable.default as JsonType } return obj } } if (Array.isArray(obj)) { - return obj.map((item) => applyTemplate(item, variables)) + return obj.map((item) => applyTemplate(item, variables, queryKind)) } if (typeof obj === 'object' && obj !== null) { const newObject: JsonType = {} for (const [key, value] of Object.entries(obj)) { - newObject[key] = applyTemplate(value, variables) + newObject[key] = applyTemplate(value, variables, queryKind) } return newObject } @@ -60,7 +87,15 @@ export function applyTemplate(obj: DashboardTile | JsonType, variables: Dashboar } function makeTilesUsingVariables(tiles: DashboardTile[], variables: DashboardTemplateVariableType[]): JsonType[] { - return tiles.map((tile: DashboardTile) => applyTemplate(tile, variables)) + return tiles.map((tile: DashboardTile) => { + const isQueryBased = 'query' in tile && tile.query != null + const queryKind: NodeKind | null = isQueryBased + ? isInsightVizNode(tile.query as any) + ? (tile.query as any)?.source.kind + : (tile.query as any)?.kind + : null + return applyTemplate(tile, variables, queryKind) + }) } export const newDashboardLogic = kea([ diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx b/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx index 28974600dfe387..f3ac96bb2d9497 100644 --- a/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx +++ b/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx @@ -1,7 +1,7 @@ import 'react-data-grid/lib/styles.css' import { IconGear } from '@posthog/icons' -import { LemonButton, LemonTabs, Spinner } from '@posthog/lemon-ui' +import { LemonButton, LemonTabs } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' import { AnimationType } from 'lib/animations/animations' @@ -9,12 +9,13 @@ import { Animation } from 'lib/components/Animation/Animation' import { ExportButton } from 'lib/components/ExportButton/ExportButton' import { useMemo } from 'react' import DataGrid from 'react-data-grid' -import { InsightErrorState } from 'scenes/insights/EmptyStates' +import { InsightErrorState, StatelessInsightLoadingState } from 'scenes/insights/EmptyStates' import { HogQLBoldNumber } from 'scenes/insights/views/BoldNumber/BoldNumber' import { KeyboardShortcut } from '~/layout/navigation-3000/components/KeyboardShortcut' import { themeLogic } from '~/layout/navigation-3000/themeLogic' import { dataNodeLogic } from '~/queries/nodes/DataNode/dataNodeLogic' +import { ElapsedTime } from '~/queries/nodes/DataNode/ElapsedTime' import { LineGraph } from '~/queries/nodes/DataVisualization/Components/Charts/LineGraph' import { SideBar } from '~/queries/nodes/DataVisualization/Components/SideBar' import { Table } from '~/queries/nodes/DataVisualization/Components/Table' @@ -37,15 +38,14 @@ export function OutputPane(): JSX.Element { const { variablesForInsight } = useValues(variablesLogic) const { editingView, sourceQuery, exportContext, isValidView, error } = useValues(multitabEditorLogic) - const { saveAsInsight, saveAsView, setSourceQuery } = useActions(multitabEditorLogic) + const { saveAsInsight, saveAsView, setSourceQuery, runQuery } = useActions(multitabEditorLogic) const { isDarkModeOn } = useValues(themeLogic) - const { response, responseLoading } = useValues(dataNodeLogic) - const { loadData } = useActions(dataNodeLogic) + const { response, responseLoading, responseError, 
queryId, pollResponse } = useValues(dataNodeLogic) const { dataWarehouseSavedQueriesLoading } = useValues(dataWarehouseViewsLogic) const { updateDataWarehouseSavedQuery } = useActions(dataWarehouseViewsLogic) - const { visualizationType } = useValues(dataVisualizationLogic) + const { visualizationType, queryCancelled } = useValues(dataVisualizationLogic) - const vizKey = `SQLEditorScene` + const vizKey = useMemo(() => `SQLEditorScene`, []) const columns = useMemo(() => { return ( @@ -70,44 +70,6 @@ export function OutputPane(): JSX.Element { }) }, [response]) - const Content = (): JSX.Element | null => { - if (activeTab === OutputTab.Results) { - return responseLoading ? ( - - ) : !response ? ( - Query results will appear here - ) : ( -
- -
- ) - } - - if (activeTab === OutputTab.Visualization) { - return !response ? ( - Query be results will be visualized here - ) : ( -
- -
- ) - } - - return null - } - return (
{variablesForInsight.length > 0 && ( @@ -182,7 +144,7 @@ export function OutputPane(): JSX.Element { disabledReason={error ? error : ''} loading={responseLoading} type="primary" - onClick={() => loadData(true)} + onClick={() => runQuery()} > Run @@ -190,7 +152,26 @@ export function OutputPane(): JSX.Element {
- + +
+
+
) @@ -206,8 +187,6 @@ function InternalDataTableVisualization( showResultControls, response, responseLoading, - responseError, - queryCancelled, isChartSettingsPanelOpen, } = useValues(dataVisualizationLogic) @@ -251,29 +230,7 @@ function InternalDataTableVisualization( )} -
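Note on the OutputPane refactor above: the Run button now routes through multitabEditorLogic's runQuery action instead of calling dataNodeLogic.loadData directly, and loading is rendered by the new StatelessInsightLoadingState, driven by dataNodeLogic's pollResponse. A minimal sketch of that wiring, assuming dataNodeLogic is already bound in context as it is in OutputPane (LoadingPane is a hypothetical name, not part of this diff):

    import { useValues } from 'kea'
    import { StatelessInsightLoadingState } from 'scenes/insights/EmptyStates'
    import { dataNodeLogic } from '~/queries/nodes/DataNode/dataNodeLogic'

    // Renders the query-progress loading state from dataNodeLogic's polling data.
    function LoadingPane(): JSX.Element {
        const { queryId, pollResponse } = useValues(dataNodeLogic)
        return <StatelessInsightLoadingState queryId={queryId} pollResponse={pollResponse} />
    }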
- {visualizationType !== ChartDisplayType.ActionsTable && responseError ? ( -
- -
- ) : ( - component - )} -
+
{component}
{showResultControls && ( <> @@ -302,3 +259,97 @@ function InternalDataTableVisualization( ) } + +const ErrorState = ({ responseError, sourceQuery, queryCancelled, response }: any): JSX.Element | null => { + return ( +
+ +
+ ) +} + +const Content = ({ + activeTab, + responseError, + responseLoading, + response, + sourceQuery, + queryCancelled, + columns, + rows, + isDarkModeOn, + vizKey, + setSourceQuery, + exportContext, + saveAsInsight, + queryId, + pollResponse, +}: any): JSX.Element | null => { + if (activeTab === OutputTab.Results) { + if (responseError) { + return ( + + ) + } + + return responseLoading ? ( + + ) : !response ? ( + Query results will appear here + ) : ( +
+ +
+ ) } + + if (activeTab === OutputTab.Visualization) { + if (responseError) { + return ( + + ) + } + + return !response ? ( + Query results will be visualized here + ) : ( +
+ +
+ ) + } + + return null +} diff --git a/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx b/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx index 35a41c0f402b7b..85c9d80ef6270d 100644 --- a/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx +++ b/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx @@ -14,7 +14,7 @@ interface QueryTabsProps { export function QueryTabs({ models, onClear, onClick, onAdd, activeModelUri }: QueryTabsProps): JSX.Element { return ( -
+
{models.map((model: QueryTab) => ( - +
+
+ +
{editingView && (
Editing view "{editingView.name}" @@ -85,16 +86,13 @@ export function QueryWindow(): JSX.Element { ) } -function InternalQueryWindow(): JSX.Element { +function InternalQueryWindow(): JSX.Element | null { const { cacheLoading, sourceQuery, queryInput } = useValues(multitabEditorLogic) const { setSourceQuery } = useActions(multitabEditorLogic) + // NOTE: hacky way to avoid flicker loading if (cacheLoading) { - return ( -
- -
- ) + return null } const dataVisualizationLogicProps: DataVisualizationLogicProps = { diff --git a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx index e8960dfa8d81f3..740ea33aced838 100644 --- a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx +++ b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx @@ -54,7 +54,7 @@ export const multitabEditorLogic = kea([ actions({ setQueryInput: (queryInput: string) => ({ queryInput }), updateState: true, - runQuery: (queryOverride?: string) => ({ queryOverride }), + runQuery: (queryOverride?: string, switchTab?: boolean) => ({ queryOverride, switchTab }), setActiveQuery: (query: string) => ({ query }), setTabs: (tabs: QueryTab[]) => ({ tabs }), addTab: (tab: QueryTab) => ({ tab }), @@ -311,7 +311,7 @@ export const multitabEditorLogic = kea([ }) localStorage.setItem(editorModelsStateKey(props.key), JSON.stringify(queries)) }, - runQuery: ({ queryOverride }) => { + runQuery: ({ queryOverride, switchTab }) => { const query = queryOverride || values.queryInput actions.setSourceQuery({ @@ -328,7 +328,7 @@ export const multitabEditorLogic = kea([ query, }, autoLoad: false, - }).actions.loadData(true) + }).actions.loadData(!switchTab) }, saveAsView: async () => { LemonDialog.openForm({ @@ -418,7 +418,7 @@ export const multitabEditorLogic = kea([ const _model = props.monaco.editor.getModel(activeModelUri.uri) const val = _model?.getValue() actions.setQueryInput(val ?? '') - actions.runQuery() + actions.runQuery(undefined, true) } }, })), diff --git a/frontend/src/scenes/error-tracking/AlphaAccessScenePrompt.tsx b/frontend/src/scenes/error-tracking/AlphaAccessScenePrompt.tsx new file mode 100644 index 00000000000000..3f9c7b2a50168a --- /dev/null +++ b/frontend/src/scenes/error-tracking/AlphaAccessScenePrompt.tsx @@ -0,0 +1,40 @@ +import { LemonButton } from '@posthog/lemon-ui' +import { useActions } from 'kea' +import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' +import { supportLogic } from 'lib/components/Support/supportLogic' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' + +import { ProductKey } from '~/types' + +export const AlphaAccessScenePrompt = ({ children }: { children: React.ReactElement }): JSX.Element => { + const hasErrorTracking = useFeatureFlag('ERROR_TRACKING') + const { openSupportForm } = useActions(supportLogic) + + return hasErrorTracking ? 
( + children + ) : ( + + openSupportForm({ + target_area: 'error_tracking', + isEmailFormOpen: true, + message: 'Hi\n\nI would like to request access to the error tracking product', + severity_level: 'low', + }) + } + > + Request access + + } + productKey={ProductKey.ERROR_TRACKING} + /> + ) +} diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx index d381d62640a0f2..33c8f2af6779e8 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx @@ -8,6 +8,7 @@ import { humanFriendlyDetailedTime } from 'lib/utils' import { useEffect, useState } from 'react' import { SceneExport } from 'scenes/sceneTypes' +import { AlphaAccessScenePrompt } from './AlphaAccessScenePrompt' import { errorTrackingSymbolSetLogic } from './errorTrackingSymbolSetLogic' import { SymbolSetUploadModal } from './SymbolSetUploadModal' @@ -25,19 +26,24 @@ export function ErrorTrackingConfigurationScene(): JSX.Element { }, [loadSymbolSets]) return ( -
-

Symbol sets

-

- Source maps are required to demangle any minified code in your exception stack traces. PostHog - automatically retrieves source maps where possible. Cases where it was not possible are listed below. - Source maps can be uploaded retroactively but changes will only apply to all future exceptions ingested. -

- {missingSymbolSets.length > 0 && ( - - )} - {validSymbolSets.length > 0 && } - -
+ +
+

Symbol sets

+

+ Source maps are required to demangle any minified code in your exception stack traces. PostHog + automatically retrieves source maps where possible. Cases where it was not possible are listed + below. Source maps can be uploaded retroactively, but changes will only apply to + future exceptions ingested.
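As an aside, this whole configuration scene is wrapped in the new AlphaAccessScenePrompt, which renders its child only when the ERROR_TRACKING feature flag is enabled and otherwise shows the request-access ProductIntroduction. A minimal usage sketch (GatedScene is a hypothetical name):

    import { AlphaAccessScenePrompt } from 'scenes/error-tracking/AlphaAccessScenePrompt'

    // Any error tracking scene can opt into the alpha gate by wrapping its content.
    function GatedScene(): JSX.Element {
        return (
            <AlphaAccessScenePrompt>
                <div>Only visible when the ERROR_TRACKING flag is enabled</div>
            </AlphaAccessScenePrompt>
        )
    }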

+ {missingSymbolSets.length > 0 && ( + + )} + {(validSymbolSets.length > 0 || missingSymbolSets.length === 0) && ( + + )} + +
+
) } diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx index 7965e2563ae22b..33549cf177fa28 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx @@ -8,6 +8,7 @@ import { SceneExport } from 'scenes/sceneTypes' import { ErrorTrackingIssue } from '~/queries/schema' +import { AlphaAccessScenePrompt } from './AlphaAccessScenePrompt' import { AssigneeSelect } from './AssigneeSelect' import ErrorTrackingFilters from './ErrorTrackingFilters' import { errorTrackingIssueSceneLogic } from './errorTrackingIssueSceneLogic' @@ -40,47 +41,52 @@ export function ErrorTrackingIssueScene(): JSX.Element { }, []) return ( - <> - - updateIssue({ assignee })} - type="secondary" - showName - /> -
- updateIssue({ status: 'archived' })}> - Archive - - updateIssue({ status: 'resolved' })}> - Resolve - + + <> + + updateIssue({ assignee })} + type="secondary" + showName + /> +
+ updateIssue({ status: 'archived' })} + > + Archive + + updateIssue({ status: 'resolved' })}> + Resolve + +
-
+ ) : ( + updateIssue({ status: 'active' })} + tooltip="Mark as active" + > + {STATUS_LABEL[issue.status]} + + ) ) : ( - updateIssue({ status: 'active' })} - tooltip="Mark as active" - > - {STATUS_LABEL[issue.status]} - + false ) - ) : ( - false - ) - } - /> - - - - - - + } + /> + + + + + + + ) } diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx index 754cb398ec1368..d2fb4ffc8ebb7c 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx @@ -16,6 +16,7 @@ import { ErrorTrackingIssue } from '~/queries/schema' import { QueryContext, QueryContextColumnComponent, QueryContextColumnTitleComponent } from '~/queries/types' import { InsightLogicProps } from '~/types' +import { AlphaAccessScenePrompt } from './AlphaAccessScenePrompt' import { AssigneeSelect } from './AssigneeSelect' import { errorTrackingDataNodeLogic } from './errorTrackingDataNodeLogic' import ErrorTrackingFilters from './ErrorTrackingFilters' @@ -52,14 +53,16 @@ export function ErrorTrackingScene(): JSX.Element { } return ( - -
- - - - {selectedIssueIds.length === 0 ? : } - - + + +
+ + + + {selectedIssueIds.length === 0 ? : } + + + ) } diff --git a/frontend/src/scenes/error-tracking/errorTrackingLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingLogic.ts index c1a847a8ab6471..260e4e4d08ccba 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingLogic.ts @@ -27,7 +27,9 @@ const customOptions: Record = { all: [lastYear, lastMonth, lastDay], } -const DEFAULT_FILTER_GROUP = { +export const DEFAULT_ERROR_TRACKING_DATE_RANGE = { date_from: '-7d', date_to: null } + +export const DEFAULT_ERROR_TRACKING_FILTER_GROUP = { type: FilterLogicalOperator.And, values: [{ type: FilterLogicalOperator.And, values: [] }], } @@ -42,15 +44,15 @@ export const errorTrackingLogic = kea([ actions({ setDateRange: (dateRange: DateRange) => ({ dateRange }), setAssignee: (assignee: number | null) => ({ assignee }), + setSearchQuery: (searchQuery: string) => ({ searchQuery }), setFilterGroup: (filterGroup: UniversalFiltersGroup) => ({ filterGroup }), setFilterTestAccounts: (filterTestAccounts: boolean) => ({ filterTestAccounts }), - setSearchQuery: (searchQuery: string) => ({ searchQuery }), setSparklineSelectedPeriod: (period: string | null) => ({ period }), _setSparklineOptions: (options: SparklineOption[]) => ({ options }), }), reducers({ dateRange: [ - { date_from: '-7d', date_to: null } as DateRange, + DEFAULT_ERROR_TRACKING_DATE_RANGE as DateRange, { persist: true }, { setDateRange: (_, { dateRange }) => dateRange, @@ -64,7 +66,7 @@ export const errorTrackingLogic = kea([ }, ], filterGroup: [ - DEFAULT_FILTER_GROUP as UniversalFiltersGroup, + DEFAULT_ERROR_TRACKING_FILTER_GROUP as UniversalFiltersGroup, { persist: true }, { setFilterGroup: (_, { filterGroup }) => filterGroup, diff --git a/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts index bd36ead8682568..92dbd6d61dcac6 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts @@ -1,9 +1,17 @@ +import equal from 'fast-deep-equal' import { actions, connect, kea, path, reducers, selectors } from 'kea' +import { actionToUrl, router, urlToAction } from 'kea-router' import { subscriptions } from 'kea-subscriptions' +import { objectsEqual } from 'lib/utils' +import { Params } from 'scenes/sceneTypes' import { DataTableNode, ErrorTrackingQuery } from '~/queries/schema' -import { errorTrackingLogic } from './errorTrackingLogic' +import { + DEFAULT_ERROR_TRACKING_DATE_RANGE, + DEFAULT_ERROR_TRACKING_FILTER_GROUP, + errorTrackingLogic, +} from './errorTrackingLogic' import type { errorTrackingSceneLogicType } from './errorTrackingSceneLogicType' import { errorTrackingQuery } from './queries' @@ -23,6 +31,10 @@ export const errorTrackingSceneLogic = kea([ 'hasGroupActions', ], ], + actions: [ + errorTrackingLogic, + ['setAssignee', 'setDateRange', 'setFilterGroup', 'setSearchQuery', 'setFilterTestAccounts'], + ], }), actions({ @@ -86,4 +98,79 @@ export const errorTrackingSceneLogic = kea([ subscriptions(({ actions }) => ({ query: () => actions.setSelectedIssueIds([]), })), + + actionToUrl(({ values }) => { + const buildURL = (): [ + string, + Params, + Record, + { + replace: boolean + } + ] => { + const searchParams: Params = { + orderBy: values.orderBy, + filterTestAccounts: values.filterTestAccounts, + } + + if (values.assignee) { + searchParams.assignee = values.assignee + } + if (values.searchQuery) { + 
searchParams.searchQuery = values.searchQuery + } + if (!objectsEqual(values.filterGroup, DEFAULT_ERROR_TRACKING_FILTER_GROUP)) { + searchParams.filterGroup = values.filterGroup + } + if (!objectsEqual(values.dateRange, DEFAULT_ERROR_TRACKING_DATE_RANGE)) { + searchParams.dateRange = values.dateRange + } + + if (!objectsEqual(searchParams, router.values.searchParams)) { + return [router.values.location.pathname, searchParams, router.values.hashParams, { replace: true }] + } + + return [ + router.values.location.pathname, + router.values.searchParams, + router.values.hashParams, + { replace: false }, + ] + } + + return { + setOrderBy: () => buildURL(), + setAssignee: () => buildURL(), + setDateRange: () => buildURL(), + setFilterGroup: () => buildURL(), + setSearchQuery: () => buildURL(), + setFilterTestAccounts: () => buildURL(), + } + }), + + urlToAction(({ actions, values }) => { + const urlToAction = (_: any, params: Params): void => { + if (params.orderBy && !equal(params.orderBy, values.orderBy)) { + actions.setOrderBy(params.orderBy) + } + if (params.dateRange && !equal(params.dateRange, values.dateRange)) { + actions.setDateRange(params.dateRange) + } + if (params.filterGroup && !equal(params.filterGroup, values.filterGroup)) { + actions.setFilterGroup(params.filterGroup) + } + if (params.filterTestAccounts && !equal(params.filterTestAccounts, values.filterTestAccounts)) { + actions.setFilterTestAccounts(params.filterTestAccounts) + } + if (params.assignee && !equal(params.assignee, values.assignee)) { + actions.setAssignee(params.assignee) + } + if (params.searchQuery && !equal(params.searchQuery, values.searchQuery)) { + actions.setSearchQuery(params.searchQuery) + } + } + return { + '*': urlToAction, + } + }), ]) diff --git a/frontend/src/scenes/experiments/ExperimentForm.tsx b/frontend/src/scenes/experiments/ExperimentForm.tsx index d1fd0b140bc4d0..125fb2320ddab8 100644 --- a/frontend/src/scenes/experiments/ExperimentForm.tsx +++ b/frontend/src/scenes/experiments/ExperimentForm.tsx @@ -6,6 +6,7 @@ import { useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' import { ExperimentVariantNumber } from 'lib/components/SeriesGlyph' import { MAX_EXPERIMENT_VARIANTS } from 'lib/constants' +import { groupsAccessLogic, GroupsAccessStatus } from 'lib/introductions/groupsAccessLogic' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonRadio } from 'lib/lemon-ui/LemonRadio' @@ -20,6 +21,7 @@ const ExperimentFormFields = (): JSX.Element => { const { addExperimentGroup, removeExperimentGroup, setExperiment, createExperiment, setExperimentType } = useActions(experimentLogic) const { webExperimentsAvailable } = useValues(experimentsLogic) + const { groupsAccessStatus } = useValues(groupsAccessLogic) return (
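Worth noting on the errorTrackingSceneLogic change above: actionToUrl and urlToAction are symmetric, and both sides guard with deep-equality checks so that applying URL params does not trigger another URL write in a loop. A condensed sketch of the guard, under the same assumptions as the diff (buildUrl here is illustrative):

    import { router } from 'kea-router'
    import { objectsEqual } from 'lib/utils'
    import { Params } from 'scenes/sceneTypes'

    // Rewrite the URL only when the serialized filter state actually changed;
    // `replace: true` updates in place instead of stacking a history entry.
    function buildUrl(searchParams: Params): [string, Params, Record<string, any>, { replace: boolean }] {
        const { location, searchParams: current, hashParams } = router.values
        return objectsEqual(searchParams, current)
            ? [location.pathname, current, hashParams, { replace: false }]
            : [location.pathname, searchParams, hashParams, { replace: true }]
    }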
@@ -103,37 +105,40 @@ const ExperimentFormFields = (): JSX.Element => { />
)} -
-

Participant type

-
- The type on which to aggregate metrics. You can change this at any time during the experiment. -
- - { - const groupTypeIndex = rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined + {groupsAccessStatus === GroupsAccessStatus.AlreadyUsing && ( +
+

Participant type

+
+ The type on which to aggregate metrics. You can change this at any time during the + experiment. +
+ + { + const groupTypeIndex = rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined - setExperiment({ - parameters: { - ...experiment.parameters, - aggregation_group_type_index: groupTypeIndex ?? undefined, - }, - }) - }} - options={[ - { value: -1, label: 'Persons' }, - ...Array.from(groupTypes.values()).map((groupType) => ({ - value: groupType.group_type_index, - label: capitalizeFirstLetter(aggregationLabel(groupType.group_type_index).plural), - })), - ]} - /> -
+ setExperiment({ + parameters: { + ...experiment.parameters, + aggregation_group_type_index: groupTypeIndex ?? undefined, + }, + }) + }} + options={[ + { value: -1, label: 'Persons' }, + ...Array.from(groupTypes.values()).map((groupType) => ({ + value: groupType.group_type_index, + label: capitalizeFirstLetter(aggregationLabel(groupType.group_type_index).plural), + })), + ]} + /> +
+ )}

Variants

Add up to 9 variants to test against your control.
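One detail in the participant-type selector above: LemonSelect needs a concrete option value, so 'Persons' is encoded with a -1 sentinel and mapped back to undefined before being written to experiment.parameters.aggregation_group_type_index. As a standalone sketch (the helper name is illustrative):

    // -1 means "Persons" (no group aggregation); any other value is a real group type index.
    const toAggregationGroupTypeIndex = (rawGroupTypeIndex: number): number | undefined =>
        rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined

    // toAggregationGroupTypeIndex(-1) === undefined, toAggregationGroupTypeIndex(0) === 0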
diff --git a/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx b/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx index 575eb84c527083..7f4378a7ec5efe 100644 --- a/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx @@ -1,28 +1,16 @@ import { IconInfo } from '@posthog/icons' import { Tooltip } from '@posthog/lemon-ui' import { useValues } from 'kea' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { InsightEmptyState } from 'scenes/insights/EmptyStates' import { InsightViz } from '~/queries/nodes/InsightViz/InsightViz' -import { queryFromFilters } from '~/queries/nodes/InsightViz/utils' import { CachedExperimentTrendsQueryResponse, InsightQueryNode, InsightVizNode, NodeKind } from '~/queries/schema' -import { - _TrendsExperimentResults, - BaseMathType, - ChartDisplayType, - InsightType, - PropertyFilterType, - PropertyOperator, -} from '~/types' +import { BaseMathType, ChartDisplayType, InsightType, PropertyFilterType, PropertyOperator } from '~/types' import { experimentLogic } from '../experimentLogic' -import { transformResultFilters } from '../utils' export function CumulativeExposuresChart(): JSX.Element { const { experiment, experimentResults, getMetricType } = useValues(experimentLogic) - const { featureFlags } = useValues(featureFlagLogic) const metricIdx = 0 const metricType = getMetricType(metricIdx) @@ -32,99 +20,52 @@ export function CumulativeExposuresChart(): JSX.Element { variants.push(`holdout-${experiment.holdout.id}`) } - let query + let query: InsightVizNode - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - if (metricType === InsightType.TRENDS) { - query = { - kind: NodeKind.InsightVizNode, - source: (experimentResults as CachedExperimentTrendsQueryResponse).exposure_query, - } - } else { - query = { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - dateRange: { - date_from: experiment.start_date, - date_to: experiment.end_date, - }, - interval: 'day', - trendsFilter: { - display: ChartDisplayType.ActionsLineGraphCumulative, - showLegend: false, - smoothingIntervals: 1, - }, - series: [ - { - kind: NodeKind.EventsNode, - event: experiment.filters?.events?.[0]?.name, - math: BaseMathType.UniqueUsers, - properties: [ - { - key: `$feature/${experiment.feature_flag_key}`, - value: variants, - operator: PropertyOperator.Exact, - type: PropertyFilterType.Event, - }, - ], - }, - ], - breakdownFilter: { - breakdown: `$feature/${experiment.feature_flag_key}`, - breakdown_type: 'event', - }, - }, - } + if (metricType === InsightType.TRENDS) { + query = { + kind: NodeKind.InsightVizNode, + source: (experimentResults as CachedExperimentTrendsQueryResponse)?.exposure_query || { + kind: NodeKind.TrendsQuery, + series: [], + interval: 'day', + }, } } else { - if (metricType === InsightType.TRENDS && experiment.parameters?.custom_exposure_filter) { - const trendResults = experimentResults as _TrendsExperimentResults - const queryFilters = { - ...trendResults.exposure_filters, - display: ChartDisplayType.ActionsLineGraphCumulative, - } as _TrendsExperimentResults['exposure_filters'] - query = queryFromFilters(transformResultFilters(queryFilters)) - } else { - query = { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - dateRange: { - date_from: experiment.start_date, - 
date_to: experiment.end_date, - }, - interval: 'day', - trendsFilter: { - display: ChartDisplayType.ActionsLineGraphCumulative, - showLegend: false, - smoothingIntervals: 1, - }, - series: [ - { - kind: NodeKind.EventsNode, - event: - metricType === InsightType.TRENDS - ? '$feature_flag_called' - : experiment.filters?.events?.[0]?.name, - math: BaseMathType.UniqueUsers, - properties: [ - { - key: `$feature/${experiment.feature_flag_key}`, - value: variants, - operator: PropertyOperator.Exact, - type: PropertyFilterType.Event, - }, - ], - }, - ], - breakdownFilter: { - breakdown: `$feature/${experiment.feature_flag_key}`, - breakdown_type: 'event', + query = { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.TrendsQuery, + dateRange: { + date_from: experiment.start_date, + date_to: experiment.end_date, + }, + interval: 'day', + trendsFilter: { + display: ChartDisplayType.ActionsLineGraphCumulative, + showLegend: false, + smoothingIntervals: 1, + }, + series: [ + { + kind: NodeKind.EventsNode, + event: experiment.filters?.events?.[0]?.name, + math: BaseMathType.UniqueUsers, + properties: [ + { + key: `$feature/${experiment.feature_flag_key}`, + value: variants, + operator: PropertyOperator.Exact, + type: PropertyFilterType.Event, + }, + ], }, + ], + breakdownFilter: { + breakdown: `$feature/${experiment.feature_flag_key}`, + breakdown_type: 'event', }, - } + }, } } @@ -139,7 +80,7 @@ export function CumulativeExposuresChart(): JSX.Element { {experiment.start_date ? ( ), + ...query, showTable: true, }} setQuery={() => {}} diff --git a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx index 30d6b93ea82781..5ebf192769a2dc 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx @@ -64,7 +64,12 @@ export function DistributionModal({ experimentId }: { experimentId: Experiment[' { saveSidebarExperimentFeatureFlag(featureFlag) - updateExperiment({ holdout_id: experiment.holdout_id }) + updateExperiment({ + holdout_id: experiment.holdout_id, + parameters: { + feature_flag_variants: featureFlag?.filters?.multivariate?.variants ?? [], + }, + }) closeDistributionModal() }} type="primary" diff --git a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx index 867d82bc83a375..59766e480529f5 100644 --- a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx @@ -250,6 +250,12 @@ export function Goal(): JSX.Element { const [isModalOpen, setIsModalOpen] = useState(false) const metricType = getMetricType(0) + // :FLAG: CLEAN UP AFTER MIGRATION + const isDataWarehouseMetric = + featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL] && + metricType === InsightType.TRENDS && + (experiment.metrics[0] as ExperimentTrendsQuery).count_query?.series[0].kind === NodeKind.DataWarehouseNode + return (
@@ -322,16 +328,18 @@ export function Goal(): JSX.Element { Change goal
- {metricType === InsightType.TRENDS && !experimentMathAggregationForTrends() && ( - <> - -
-
- + {metricType === InsightType.TRENDS && + !experimentMathAggregationForTrends() && + !isDataWarehouseMetric && ( + <> + +
+
+ +
-
- - )} + + )}
)} {targetResults ? countDataForVariant(targetResults, variant) : '—'}
+ const count = targetResults ? countDataForVariant(targetResults, variant) : null + return
{count === null ? '—' : humanFriendlyNumber(count)}
}, }, { title: 'Exposure', render: function Key(_, item: TabularSecondaryMetricResults): JSX.Element { const { variant } = item - return ( -
{targetResults ? exposureCountDataForVariant(targetResults, variant) : '—'}
- ) + const exposureCount = targetResults + ? exposureCountDataForVariant(targetResults, variant) + : null + return
{exposureCount === null ? '—' : humanFriendlyNumber(exposureCount)}
}, }, { diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx index c24376ac7e67cd..df8580fee68dd0 100644 --- a/frontend/src/scenes/experiments/ExperimentView/components.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -493,44 +493,38 @@ export function PageHeaderCustom(): JSX.Element { )} {experiment && isExperimentRunning && (
- {!isExperimentStopped && !experiment.archived && ( - <> - - - exposureCohortId ? undefined : createExposureCohort() - } - fullWidth - data-attr={`${ - exposureCohortId ? 'view' : 'create' - }-exposure-cohort`} - to={exposureCohortId ? urls.cohort(exposureCohortId) : undefined} - targetBlank={!!exposureCohortId} - > - {exposureCohortId ? 'View' : 'Create'} exposure cohort - - loadExperimentResults(true)} - fullWidth - data-attr="refresh-experiment" - > - Refresh experiment results - - loadSecondaryMetricResults(true)} - fullWidth - data-attr="refresh-secondary-metrics" - > - Refresh secondary metrics - - - } - /> - - - )} + <> + + (exposureCohortId ? undefined : createExposureCohort())} + fullWidth + data-attr={`${exposureCohortId ? 'view' : 'create'}-exposure-cohort`} + to={exposureCohortId ? urls.cohort(exposureCohortId) : undefined} + targetBlank={!!exposureCohortId} + > + {exposureCohortId ? 'View' : 'Create'} exposure cohort + + loadExperimentResults(true)} + fullWidth + data-attr="refresh-experiment" + > + Refresh experiment results + + loadSecondaryMetricResults(true)} + fullWidth + data-attr="refresh-secondary-metrics" + > + Refresh secondary metrics + + + } + /> + + {!experiment.end_date && ( ([ ...values.experiment.metrics[0], experiment_id: values.experimentId, } + if ( + queryWithExperimentId.kind === NodeKind.ExperimentTrendsQuery && + values.featureFlags[FEATURE_FLAGS.EXPERIMENT_STATS_V2] + ) { + queryWithExperimentId.stats_version = 2 + } const response = await performQuery(queryWithExperimentId, undefined, refresh) diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index f673e27c4b4010..d7e2ad01c91330 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -70,6 +70,7 @@ import FeatureFlagProjects from './FeatureFlagProjects' import { FeatureFlagReleaseConditions } from './FeatureFlagReleaseConditions' import FeatureFlagSchedule from './FeatureFlagSchedule' import { featureFlagsLogic, FeatureFlagsTab } from './featureFlagsLogic' +import { FeatureFlagStatusIndicator } from './FeatureFlagStatusIndicator' import { RecentFeatureFlagInsights } from './RecentFeatureFlagInsightsCard' export const scene: SceneExport = { @@ -382,7 +383,8 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element {
If your feature flag is applied before identifying the user, use this to ensure that the flag value remains consistent for the same user. - Depending on your setup, this option might not always be suitable.{' '} + Depending on your setup, this option might not always be suitable. This + feature requires creating profiles for anonymous users.{' '} ) : ( - { - LemonDialog.open({ - title: `${newValue === true ? 'Enable' : 'Disable'} this flag?`, - description: `This flag will be immediately ${ - newValue === true ? 'rolled out to' : 'rolled back from' - } the users matching the release conditions.`, - primaryButton: { - children: 'Confirm', - type: 'primary', - onClick: () => { - const updatedFlag = { ...featureFlag, active: newValue } - setFeatureFlag(updatedFlag) - saveFeatureFlag(updatedFlag) +
+ { + LemonDialog.open({ + title: `${newValue === true ? 'Enable' : 'Disable'} this flag?`, + description: `This flag will be immediately ${ + newValue === true ? 'rolled out to' : 'rolled back from' + } the users matching the release conditions.`, + primaryButton: { + children: 'Confirm', + type: 'primary', + onClick: () => { + const updatedFlag = { ...featureFlag, active: newValue } + setFeatureFlag(updatedFlag) + saveFeatureFlag(updatedFlag) + }, + size: 'small', }, - size: 'small', - }, - secondaryButton: { - children: 'Cancel', - type: 'tertiary', - size: 'small', - }, - }) - }} - label="Enabled" - disabledReason={ - !featureFlag.can_edit - ? "You only have view access to this feature flag. To make changes, contact the flag's creator." - : null - } - checked={featureFlag.active} - /> + secondaryButton: { + children: 'Cancel', + type: 'tertiary', + size: 'small', + }, + }) + }} + label="Enabled" + disabledReason={ + !featureFlag.can_edit + ? "You only have view access to this feature flag. To make changes, contact the flag's creator." + : null + } + checked={featureFlag.active} + /> + +
)}
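For context on the new file below: the indicator is fed by the flagStatus loader added to featureFlagLogic further down in this diff. A usage sketch, assuming the logic is mounted for the current flag as it is in the scene above (FlagToggleWithStatus is a hypothetical name):

    import { useValues } from 'kea'
    import { FeatureFlagStatusIndicator } from 'scenes/feature-flags/FeatureFlagStatusIndicator'
    import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic'

    // Shows a STALE/INACTIVE tag with an explanatory tooltip next to the flag controls.
    function FlagToggleWithStatus(): JSX.Element {
        const { flagStatus } = useValues(featureFlagLogic)
        return (
            <div className="flex items-center gap-2">
                {/* the scene's LemonSwitch would sit here */}
                <FeatureFlagStatusIndicator flagStatus={flagStatus} />
            </div>
        )
    }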
diff --git a/frontend/src/scenes/feature-flags/FeatureFlagStatusIndicator.tsx new file mode 100644 index 00000000000000..7b2ee5fc7fa14e --- /dev/null +++ b/frontend/src/scenes/feature-flags/FeatureFlagStatusIndicator.tsx @@ -0,0 +1,44 @@ +import { LemonTag } from 'lib/lemon-ui/LemonTag' +import { Tooltip } from 'lib/lemon-ui/Tooltip' + +import { FeatureFlagStatus, FeatureFlagStatusResponse } from '~/types' + +export function FeatureFlagStatusIndicator({ + flagStatus, +}: { + flagStatus: FeatureFlagStatusResponse | null +}): JSX.Element | null { + if ( + !flagStatus || + [ + FeatureFlagStatus.ACTIVE, + FeatureFlagStatus.DELETED, + FeatureFlagStatus.UNKNOWN, + ].includes(flagStatus.status) + ) { + return null + } + + return ( + +
{flagStatus.reason}
+
+ {flagStatus.status === FeatureFlagStatus.STALE && + 'Make sure to remove any references to this flag in your code before deleting it.'} + {flagStatus.status === FeatureFlagStatus.INACTIVE && + 'It is probably not being used in your code, but be sure to remove any references to this flag before deleting it.'} +
+ + } + placement="right" + > + + + {flagStatus.status} + + +
+ ) +} diff --git a/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx b/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx index acf32b9788ed5e..b929e2d203f19a 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx @@ -9,6 +9,7 @@ import { mswDecorator } from '~/mocks/browser' import featureFlags from './__mocks__/feature_flags.json' const meta: Meta = { + tags: ['ff'], title: 'Scenes-App/Feature Flags', parameters: { layout: 'fullscreen', @@ -33,6 +34,13 @@ const meta: Meta = { 200, featureFlags.results.find((r) => r.id === Number(req.params['flagId'])), ], + '/api/projects/:team_id/feature_flags/:flagId/status': () => [ + 200, + { + status: 'active', + reason: 'Feature flag is active', + }, + ], }, post: { '/api/environments/:team_id/query': {}, diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 3b4f69787fc409..978348e7951493 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -33,6 +33,7 @@ import { EarlyAccessFeatureType, FeatureFlagGroupType, FeatureFlagRollbackConditions, + FeatureFlagStatusResponse, FeatureFlagType, FilterLogicalOperator, FilterType, @@ -755,6 +756,18 @@ export const featureFlagLogic = kea([ } }, }, + flagStatus: [ + null as FeatureFlagStatusResponse | null, + { + loadFeatureFlagStatus: () => { + const { currentTeamId } = values + if (currentTeamId && props.id && props.id !== 'new' && props.id !== 'link') { + return api.featureFlags.getStatus(currentTeamId, props.id) + } + return null + }, + }, + ], })), listeners(({ actions, values, props }) => ({ submitNewDashboardSuccessWithResult: async ({ result }) => { @@ -1040,8 +1053,10 @@ export const featureFlagLogic = kea([ actions.setFeatureFlag(formatPayloadsWithFlag) actions.loadRelatedInsights() actions.loadAllInsightsForFlag() + actions.loadFeatureFlagStatus() } else if (props.id !== 'new') { actions.loadFeatureFlag() + actions.loadFeatureFlagStatus() } }), ]) diff --git a/frontend/src/scenes/funnels/FunnelHistogram.tsx b/frontend/src/scenes/funnels/FunnelHistogram.tsx index 1709b0eb514b46..39a157b7b878eb 100644 --- a/frontend/src/scenes/funnels/FunnelHistogram.tsx +++ b/frontend/src/scenes/funnels/FunnelHistogram.tsx @@ -29,6 +29,7 @@ export function FunnelHistogram(): JSX.Element | null {
: } + const settingLevel = featureFlags[FEATURE_FLAGS.ENVIRONMENTS] ? 'environment' : 'project' + return ( <> - Session recordings are currently disabled for this project. To use this - feature, please go to your{' '} + Session recordings are currently disabled for this {settingLevel}. To use + this feature, please go to your{' '} project settings{' '} and enable it. diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss b/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss index ddae1edbb3113a..de2ede18a2fb8a 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss +++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss @@ -38,7 +38,9 @@ } h2 { + width: 100%; text-align: center; + word-wrap: break-word; } ol { diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx index 06f0928dc54f36..da29f417bf1f3a 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx +++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx @@ -31,7 +31,7 @@ import { urls } from 'scenes/urls' import { actionsAndEventsToSeries } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' import { seriesToActionsAndEvents } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' -import { FunnelsQuery, Node } from '~/queries/schema' +import { FunnelsQuery, Node, QueryStatus } from '~/queries/schema' import { FilterType, InsightLogicProps, SavedInsightsTabs } from '~/types' import { samplingFilterLogic } from '../EditorFilters/samplingFilterLogic' @@ -82,25 +82,22 @@ function humanFileSize(size: number): string { return (+(size / Math.pow(1024, i))).toFixed(2) + ' ' + ['B', 'kB', 'MB', 'GB', 'TB'][i] } -export function InsightLoadingState({ +export function StatelessInsightLoadingState({ queryId, - insightProps, + pollResponse, + suggestion, }: { queryId?: string | null - insightProps: InsightLogicProps + pollResponse?: Record | null + suggestion?: JSX.Element }): JSX.Element { - const { suggestedSamplingPercentage, samplingPercentage } = useValues(samplingFilterLogic(insightProps)) - const { insightPollResponse } = useValues(insightDataLogic(insightProps)) - - const { currentTeam } = useValues(teamLogic) - const [rowsRead, setRowsRead] = useState(0) const [bytesRead, setBytesRead] = useState(0) const [secondsElapsed, setSecondsElapsed] = useState(0) useEffect(() => { - const status = insightPollResponse?.status?.query_progress - const previousStatus = insightPollResponse?.previousStatus?.query_progress + const status = pollResponse?.status?.query_progress + const previousStatus = pollResponse?.previousStatus?.query_progress setRowsRead(previousStatus?.rows_read || 0) setBytesRead(previousStatus?.bytes_read || 0) const interval = setInterval(() => { @@ -113,21 +110,21 @@ export function InsightLoadingState({ return Math.min(bytesRead + diff / 30, status?.bytes_read || 0) }) setSecondsElapsed(() => { - return dayjs().diff(dayjs(insightPollResponse?.status?.start_time), 'milliseconds') + return dayjs().diff(dayjs(pollResponse?.status?.start_time), 'milliseconds') }) }, 100) return () => clearInterval(interval) - }, [insightPollResponse]) + }, [pollResponse]) + const bytesPerSecond = (bytesRead / (secondsElapsed || 1)) * 1000 - const estimatedRows = insightPollResponse?.status?.query_progress?.estimated_rows_total + const estimatedRows = pollResponse?.status?.query_progress?.estimated_rows_total const cpuUtilization = - (insightPollResponse?.status?.query_progress?.active_cpu_time || 
0) / - (insightPollResponse?.status?.query_progress?.time_elapsed || 1) / + (pollResponse?.status?.query_progress?.active_cpu_time || 0) / + (pollResponse?.status?.query_progress?.time_elapsed || 1) / 10000 - currentTeam?.modifiers?.personsOnEventsMode ?? currentTeam?.default_modifiers?.personsOnEventsMode ?? 'disabled' return (
@@ -148,37 +145,14 @@ export function InsightLoadingState({ )}

-
- {currentTeam?.modifiers?.personsOnEventsMode === 'person_id_override_properties_joined' ? ( - <> - -

- You can speed this query up by changing the{' '} - person properties mode setting. -

- - ) : ( - <> - -

- {suggestedSamplingPercentage && !samplingPercentage ? ( - - Need to speed things up? Try reducing the date range, removing breakdowns, or - turning on . - - ) : suggestedSamplingPercentage && samplingPercentage ? ( - <> - Still waiting around? You must have lots of data! Kick it up a notch with{' '} - . Or try reducing the date range and - removing breakdowns. - - ) : ( - <>Need to speed things up? Try reducing the date range or removing breakdowns. - )} -

- - )} -
+ {suggestion ? ( + suggestion + ) : ( +
+ +

Need to speed things up? Try reducing the date range.

+
+ )} {queryId ? (
Query ID: {queryId} @@ -189,6 +163,66 @@ export function InsightLoadingState({ ) } +export function InsightLoadingState({ + queryId, + insightProps, +}: { + queryId?: string | null + insightProps: InsightLogicProps +}): JSX.Element { + const { suggestedSamplingPercentage, samplingPercentage } = useValues(samplingFilterLogic(insightProps)) + const { insightPollResponse } = useValues(insightDataLogic(insightProps)) + const { currentTeam } = useValues(teamLogic) + + const personsOnEventsMode = + currentTeam?.modifiers?.personsOnEventsMode ?? currentTeam?.default_modifiers?.personsOnEventsMode ?? 'disabled' + + return ( +
+ + {personsOnEventsMode === 'person_id_override_properties_joined' ? ( + <> + +

+ You can speed this query up by changing the{' '} + person properties mode{' '} + setting. +

+ + ) : ( + <> + +

+ {suggestedSamplingPercentage && !samplingPercentage ? ( + + Need to speed things up? Try reducing the date range, removing breakdowns, + or turning on . + + ) : suggestedSamplingPercentage && samplingPercentage ? ( + <> + Still waiting around? You must have lots of data! Kick it up a notch with{' '} + . Or try reducing the date range + and removing breakdowns. + + ) : ( + <> + Need to speed things up? Try reducing the date range or removing breakdowns. + + )} +

+ + )} +
+ } + /> +
+ ) +} + export function InsightTimeoutState({ queryId }: { queryId?: string | null }): JSX.Element { const { openSupportForm } = useActions(supportLogic) diff --git a/frontend/src/scenes/insights/insightVizDataLogic.ts b/frontend/src/scenes/insights/insightVizDataLogic.ts index aec4a1eb32ed88..14b0b4cbd393d7 100644 --- a/frontend/src/scenes/insights/insightVizDataLogic.ts +++ b/frontend/src/scenes/insights/insightVizDataLogic.ts @@ -100,6 +100,7 @@ export const insightVizDataLogic = kea([ updateDisplay: (display: ChartDisplayType | undefined) => ({ display }), updateHiddenLegendIndexes: (hiddenLegendIndexes: number[] | undefined) => ({ hiddenLegendIndexes }), setTimedOutQueryId: (id: string | null) => ({ id }), + setIsIntervalManuallySet: (isIntervalManuallySet: boolean) => ({ isIntervalManuallySet }), }), reducers({ @@ -109,6 +110,18 @@ export const insightVizDataLogic = kea([ setTimedOutQueryId: (_, { id }) => id, }, ], + + // Whether the interval has been manually set by the user. If true, prevents auto-adjusting the interval when date range changes. Reference: https://github.com/PostHog/posthog/issues/22785 + isIntervalManuallySet: [ + false, + { + updateQuerySource: (state, { querySource }) => { + // If interval is explicitly included in the update, mark it as manually set + return 'interval' in querySource ? true : state + }, + setIsIntervalManuallySet: (_, { isIntervalManuallySet }) => isIntervalManuallySet, + }, + ], }), selectors({ @@ -332,7 +345,7 @@ export const insightVizDataLogic = kea([ // We use 512 for query timeouts // Async queries put the error message on data.error_message, while synchronous ones use detail return insightDataError?.status === 400 || insightDataError?.status === 512 - ? (insightDataError.detail || insightDataError.data?.error_message)?.replace('Try ', 'Try ') // Add unbreakable space for better line breaking + ? 
(insightDataError.detail || insightDataError.data?.error_message)?.replace('Try ', 'Try ') // Add unbreakable space for better line breaking : null }, ], @@ -401,7 +414,11 @@ export const insightVizDataLogic = kea([ ...values.query, source: { ...values.querySource, - ...handleQuerySourceUpdateSideEffects(querySource, values.querySource as InsightQueryNode), + ...handleQuerySourceUpdateSideEffects( + querySource, + values.querySource as InsightQueryNode, + values.isIntervalManuallySet + ), }, } as Node) }, @@ -487,7 +504,8 @@ const getActiveUsersMath = ( const handleQuerySourceUpdateSideEffects = ( update: QuerySourceUpdate, - currentState: InsightQueryNode + currentState: InsightQueryNode, + isIntervalManuallySet: boolean ): QuerySourceUpdate => { const mergedUpdate = { ...update } as InsightQueryNode @@ -536,7 +554,8 @@ const handleQuerySourceUpdateSideEffects = ( update.dateRange && update.dateRange.date_from && (update.dateRange.date_from !== currentState.dateRange?.date_from || - update.dateRange.date_to !== currentState.dateRange?.date_to) + update.dateRange.date_to !== currentState.dateRange?.date_to) && + !isIntervalManuallySet // Only auto-adjust interval if not manually set ) { const { date_from, date_to } = { ...currentState.dateRange, ...update.dateRange } diff --git a/frontend/src/scenes/persons/PersonScene.tsx b/frontend/src/scenes/persons/PersonScene.tsx index d32f7ec1076767..aa8e74a9762660 100644 --- a/frontend/src/scenes/persons/PersonScene.tsx +++ b/frontend/src/scenes/persons/PersonScene.tsx @@ -7,11 +7,13 @@ import { NotFound } from 'lib/components/NotFound' import { PageHeader } from 'lib/components/PageHeader' import { PropertiesTable } from 'lib/components/PropertiesTable' import { TZLabel } from 'lib/components/TZLabel' +import { FEATURE_FLAGS } from 'lib/constants' import { groupsAccessLogic } from 'lib/introductions/groupsAccessLogic' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonTabs } from 'lib/lemon-ui/LemonTabs' import { SpinnerOverlay } from 'lib/lemon-ui/Spinner/Spinner' import { Tooltip } from 'lib/lemon-ui/Tooltip' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { copyToClipboard } from 'lib/utils/copyToClipboard' import { RelatedGroups } from 'scenes/groups/RelatedGroups' import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' @@ -106,6 +108,7 @@ export function PersonScene(): JSX.Element | null { splitMergeModalShown, urlId, distinctId, + primaryDistinctId, } = useValues(personsLogic) const { loadPersons, editProperty, deleteProperty, navigateToTab, setSplitMergeModalShown, setDistinctId } = useActions(personsLogic) @@ -113,6 +116,7 @@ export function PersonScene(): JSX.Element | null { const { deletedPersonLoading } = useValues(personDeleteModalLogic) const { groupsEnabled } = useValues(groupsAccessLogic) const { currentTeam } = useValues(teamLogic) + const { featureFlags } = useValues(featureFlagLogic) if (personError) { throw new Error(personError) @@ -122,6 +126,7 @@ export function PersonScene(): JSX.Element | null { } const url = urls.personByDistinctId(urlId || person.distinct_ids[0] || String(person.id)) + const settingLevel = featureFlags[FEATURE_FLAGS.ENVIRONMENTS] ? 'environment' : 'project' return ( <> @@ -226,8 +231,8 @@ export function PersonScene(): JSX.Element | null { {!currentTeam?.session_recording_opt_in ? (
- Session recordings are currently disabled for this project. To use this - feature, please go to your{' '} + Session recordings are currently disabled for this {settingLevel}. To use + this feature, please go to your{' '} project settings{' '} and enable it. @@ -293,7 +298,7 @@ export function PersonScene(): JSX.Element | null {
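Note on the `insightVizDataLogic` change earlier in this diff: an update that explicitly carries an `interval` key marks the interval as manually set, and a manually set interval suppresses auto-adjustment when the date range changes. A minimal sketch of that rule in plain TypeScript, outside kea (type names are simplified stand-ins, not the actual logic file):

```ts
// Hedged sketch of the interval-pinning rule; `QueryState` is a simplified stand-in.
interface QueryState {
    interval?: string
    dateRange?: { date_from?: string | null; date_to?: string | null }
}

// Reducer step: only an update that explicitly includes `interval` flips the flag.
function nextIsIntervalManuallySet(prev: boolean, update: Partial<QueryState>): boolean {
    return 'interval' in update ? true : prev
}

// Side-effect gate: auto-adjust the interval only when the date range actually
// changed AND the user hasn't pinned the interval themselves.
function shouldAutoAdjustInterval(update: Partial<QueryState>, current: QueryState, manuallySet: boolean): boolean {
    const dateRangeChanged =
        !!update.dateRange?.date_from &&
        (update.dateRange.date_from !== current.dateRange?.date_from ||
            update.dateRange.date_to !== current.dateRange?.date_to)
    return dateRangeChanged && !manuallySet
}
```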
value && setDistinctId(value)} options={person.distinct_ids.map((distinct_id) => ({ label: distinct_id, diff --git a/frontend/src/scenes/persons/personsLogic.tsx b/frontend/src/scenes/persons/personsLogic.tsx index 7d92c0593eb546..d408ec3a74ed02 100644 --- a/frontend/src/scenes/persons/personsLogic.tsx +++ b/frontend/src/scenes/persons/personsLogic.tsx @@ -286,6 +286,24 @@ export const personsLogic = kea([ (featureFlags) => featureFlags[FEATURE_FLAGS.CS_DASHBOARDS], ], feedEnabled: [(s) => [s.featureFlags], (featureFlags) => !!featureFlags[FEATURE_FLAGS.PERSON_FEED_CANVAS]], + primaryDistinctId: [ + (s) => [s.person], + (person): string | null => { + // We do not track which distinct ID was created through identify, but we can try to guess + const nonUuidDistinctIds = person?.distinct_ids.filter((id) => id?.split('-').length !== 5) + + if (nonUuidDistinctIds && nonUuidDistinctIds?.length >= 1) { + /** + * If there are one or more distinct IDs that are not a UUID, one of them is most likely + * the identified ID. In most cases, there would be only one non-UUID distinct ID. + */ + return nonUuidDistinctIds[0] + } + + // Otherwise, just fall back to the default first distinct ID + return person?.distinct_ids[0] || null + }, + ], })), listeners(({ actions, values }) => ({ editProperty: async ({ key, newValue }) => { diff --git a/frontend/src/scenes/pipeline/Transformations.tsx b/frontend/src/scenes/pipeline/Transformations.tsx index bd62e177b69f84..586e562f3a42c4 100644 --- a/frontend/src/scenes/pipeline/Transformations.tsx +++ b/frontend/src/scenes/pipeline/Transformations.tsx @@ -4,6 +4,7 @@ import { arrayMove, SortableContext, useSortable, verticalListSortingStrategy } import { CSS } from '@dnd-kit/utilities' import { LemonBadge, LemonButton, LemonModal, LemonTable, LemonTableColumn, Link } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' +import { FlaggedFeature } from 'lib/components/FlaggedFeature' import { PageHeader } from 'lib/components/PageHeader' import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' import { More } from 'lib/lemon-ui/LemonButton/More' @@ -14,6 +15,8 @@ import { urls } from 'scenes/urls' import { PipelineNodeTab, PipelineStage, ProductKey } from '~/types' import { AppMetricSparkLine } from './AppMetricSparkLine' +import { TRANSFORMATION_TYPES } from './destinations/constants' +import { Destinations } from './destinations/Destinations' import { NewButton } from './NewButton' import { pipelineAccessLogic } from './pipelineAccessLogic' import { PluginImage } from './PipelinePluginImage' @@ -61,7 +64,13 @@ export function Transformations(): JSX.Element {
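The `primaryDistinctId` selector above guesses the identified ID by filtering out UUID-shaped distinct IDs. A self-contained sketch of that heuristic (the helper names here are mine, not from the codebase):

```ts
// A UUID splits into 5 dash-separated groups (8-4-4-4-12). Anonymous distinct IDs
// generated by posthog-js are UUIDs, so a non-UUID ID was most likely set via identify().
const looksLikeUuid = (id: string): boolean => id.split('-').length === 5

function pickPrimaryDistinctId(distinctIds: string[]): string | null {
    const nonUuidIds = distinctIds.filter((id) => !looksLikeUuid(id))
    // If any non-UUID IDs exist, the first is the best guess for the identified ID;
    // otherwise fall back to the first distinct ID, matching the selector above.
    return nonUuidIds[0] ?? distinctIds[0] ?? null
}

// pickPrimaryDistinctId(['018f63e1-1a2b-7c3d-9e4f-5a6b7c8d9e0f', 'user@example.com'])
// => 'user@example.com'
```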
)} + + + +

Experimental transformations

+ +
) } @@ -238,7 +247,7 @@ const MinimalAppView = ({ transformation, order }: { transformation: Transformat return (
} /> - ) : ( - } - /> - )} + ) : null}
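For context on the Transformations scene changes above: the page now reuses the generic `Destinations` list for transformation-type hog functions, gated behind a feature flag. Roughly like the sketch below; the imported names appear in the diff, but the flag key and wrapper markup are assumptions:

```tsx
import { FlaggedFeature } from 'lib/components/FlaggedFeature'
import { TRANSFORMATION_TYPES } from './destinations/constants'
import { Destinations } from './destinations/Destinations'

// Sketch: render the shared Destinations list for transformations only when the
// (assumed) feature flag is enabled.
function ExperimentalTransformationsSection(): JSX.Element {
    return (
        <FlaggedFeature flag="hog-transformations" match={true}>
            <h2>Experimental transformations</h2>
            <Destinations types={TRANSFORMATION_TYPES} />
        </FlaggedFeature>
    )
}
```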
@@ -72,7 +67,9 @@ export function Destinations({ types }: DestinationsProps): JSX.Element { ? 'New destinations' : types.includes('site_app') ? 'New site app' - : 'New Hog function'} + : types.includes('transformation') + ? 'New transformation' + : 'New'} {/* Old site-apps until we migrate everyone onto the new ones */} @@ -169,7 +166,7 @@ export function DestinationsTable({ render: function RenderFrequency(_, destination) { return 'interval' in destination ? destination.interval : null }, - } as LemonTableColumn, + } as LemonTableColumn, ] : []), ...(showFrequencyHistory @@ -193,10 +190,10 @@ export function DestinationsTable({ ) }, - } as LemonTableColumn, + } as LemonTableColumn, ] : []), - updatedAtColumn() as LemonTableColumn, + updatedAtColumn() as LemonTableColumn, { title: 'Status', key: 'enabled', diff --git a/frontend/src/scenes/pipeline/destinations/constants.ts b/frontend/src/scenes/pipeline/destinations/constants.ts index dda2e7d0fe3d09..2d353802e5328b 100644 --- a/frontend/src/scenes/pipeline/destinations/constants.ts +++ b/frontend/src/scenes/pipeline/destinations/constants.ts @@ -2,3 +2,4 @@ import { HogFunctionTypeType } from '~/types' export const DESTINATION_TYPES = ['destination', 'site_destination'] satisfies HogFunctionTypeType[] export const SITE_APP_TYPES = ['site_app'] satisfies HogFunctionTypeType[] +export const TRANSFORMATION_TYPES = ['transformation'] satisfies HogFunctionTypeType[] diff --git a/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx b/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx index 4630e26009cf0a..42f8112f99b6cd 100644 --- a/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx +++ b/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx @@ -20,6 +20,7 @@ import { PluginType, } from '~/types' +import { hogFunctionTypeToPipelineStage } from '../hogfunctions/urls' import { pipelineAccessLogic } from '../pipelineAccessLogic' import { BatchExportDestination, @@ -28,6 +29,7 @@ import { FunctionDestination, PipelineBackend, SiteApp, + Transformation, WebhookDestination, } from '../types' import { captureBatchExportEvent, capturePluginEvent, loadPluginsFromUrl } from '../utils' @@ -35,7 +37,7 @@ import { destinationsFiltersLogic } from './destinationsFiltersLogic' import type { pipelineDestinationsLogicType } from './destinationsLogicType' // Helping kea-typegen navigate the exported default class for Fuse -export interface Fuse extends FuseClass {} +export interface Fuse extends FuseClass {} export interface PipelineDestinationsLogicProps { types: HogFunctionTypeType[] @@ -60,9 +62,12 @@ export const pipelineDestinationsLogic = kea([ ], })), actions({ - toggleNode: (destination: Destination | SiteApp, enabled: boolean) => ({ destination, enabled }), + toggleNode: (destination: Destination | SiteApp | Transformation, enabled: boolean) => ({ + destination, + enabled, + }), toggleNodeHogFunction: (destination: FunctionDestination, enabled: boolean) => ({ destination, enabled }), - deleteNode: (destination: Destination | SiteApp) => ({ destination }), + deleteNode: (destination: Destination | SiteApp | Transformation) => ({ destination }), deleteNodeBatchExport: (destination: BatchExportDestination) => ({ destination }), deleteNodeHogFunction: (destination: FunctionDestination) => ({ destination }), deleteNodeWebhook: (destination: WebhookDestination) => ({ destination }), @@ -240,7 +245,7 @@ export const pipelineDestinationsLogic = kea([ hogFunctions, user, featureFlags - ): (Destination | SiteApp)[] => 
{ + ): (Destination | Transformation | SiteApp)[] => { // Migrations are shown only in impersonation mode, for us to be able to trigger them. const httpEnabled = featureFlags[FEATURE_FLAGS.BATCH_EXPORTS_POSTHOG_HTTP] || user?.is_impersonated || user?.is_staff @@ -262,7 +267,7 @@ export const pipelineDestinationsLogic = kea([ const convertedDestinations = rawDestinations.map((d) => convertToPipelineNode( d, - 'type' in d && d.type === 'site_app' ? PipelineStage.SiteApp : PipelineStage.Destination + 'type' in d ? hogFunctionTypeToPipelineStage(d.type) : PipelineStage.Destination ) ) const enabledFirst = convertedDestinations.sort((a, b) => Number(b.enabled) - Number(a.enabled)) @@ -281,7 +286,7 @@ export const pipelineDestinationsLogic = kea([ filteredDestinations: [ (s) => [s.filters, s.destinations, s.destinationsFuse], - (filters, destinations, destinationsFuse): (Destination | SiteApp)[] => { + (filters, destinations, destinationsFuse): (Destination | Transformation | SiteApp)[] => { const { search, showPaused, kind } = filters return (search ? destinationsFuse.search(search).map((x) => x.item) : destinations).filter((dest) => { @@ -298,7 +303,7 @@ export const pipelineDestinationsLogic = kea([ hiddenDestinations: [ (s) => [s.destinations, s.filteredDestinations], - (destinations, filteredDestinations): (Destination | SiteApp)[] => { + (destinations, filteredDestinations): (Destination | Transformation | SiteApp)[] => { return destinations.filter((dest) => !filteredDestinations.includes(dest)) }, ], diff --git a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx index f837bc49fe7b35..cfa5dc06d463c0 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx @@ -35,6 +35,7 @@ import { HogFunctionIconEditable } from './HogFunctionIcon' import { HogFunctionInputs } from './HogFunctionInputs' import { HogFunctionStatusIndicator } from './HogFunctionStatusIndicator' import { HogFunctionTest, HogFunctionTestPlaceholder } from './HogFunctionTest' +import { HogFunctionMapping } from './mapping/HogFunctionMapping' const EVENT_THRESHOLD_ALERT_LEVEL = 8000 @@ -151,14 +152,13 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur return } - const showFilters = type === 'destination' || type === 'site_destination' || type === 'broadcast' - const showExpectedVolume = type === 'destination' || type === 'site_destination' - const showStatus = type === 'destination' || type === 'email' - const showEnabled = type === 'destination' || type === 'email' || type === 'site_destination' || type === 'site_app' - const canEditSource = - type === 'destination' || type === 'email' || type === 'site_destination' || type === 'site_app' - const showPersonsCount = type === 'broadcast' - const showTesting = type === 'destination' || type === 'broadcast' || type === 'email' + const showFilters = ['destination', 'site_destination', 'broadcast', 'transformation'].includes(type) + const showExpectedVolume = ['destination', 'site_destination'].includes(type) + const showStatus = ['destination', 'email', 'transformation'].includes(type) + const showEnabled = ['destination', 'email', 'site_destination', 'site_app', 'transformation'].includes(type) + const canEditSource = ['destination', 'email', 'site_destination', 'site_app', 'transformation'].includes(type) + const showPersonsCount = 
['broadcast'].includes(type) + const showTesting = ['destination', 'transformation', 'broadcast', 'email'].includes(type) return (
@@ -194,10 +194,10 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur formKey="configuration" className="space-y-3" > -
-
-
-
+
+
+
+
{({ value, onChange }) => ( -
+
{configuration.name} {template && }
@@ -243,14 +243,14 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur +

This function was built from the template{' '} {hogFunction.template.name}. If the template is updated, this function is not affected unless you choose to update it.

-
+
Close
@@ -271,8 +271,8 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur
} > -
- +
+ Built from template: {hogFunction?.template.name} @@ -288,7 +288,7 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur {showFilters && } {showPersonsCount && ( -
+
Matching persons
@@ -318,7 +318,7 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur )} {showExpectedVolume && ( -
+
Expected volume {sparkline && !sparklineLoading ? ( <> @@ -358,10 +358,10 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur )}
-
+
{!forcedSubTemplateId && template?.sub_templates && ( <> -
+
Choose template +
{subTemplate.name}
-
+
{subTemplate.description}
@@ -394,9 +394,12 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur )} -
+
- + {showSource && canEditSource ? ( } @@ -421,6 +424,8 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur
+ + {canEditSource && (
-
+

Edit source

{!showSource ?

Click here to edit the function's source code

: null} @@ -501,7 +506,7 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur ) ) : null} -
{saveButtons}
+
{saveButtons}
diff --git a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx index 1a5a517ec06457..8cbb21044fc1a9 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx @@ -14,14 +14,19 @@ import { LemonTextArea, Tooltip, } from '@posthog/lemon-ui' -import { useActions, useValues } from 'kea' +import { useValues } from 'kea' import { LemonField } from 'lib/lemon-ui/LemonField' import { CodeEditorInline, CodeEditorInlineProps } from 'lib/monaco/CodeEditorInline' import { CodeEditorResizeable } from 'lib/monaco/CodeEditorResizable' import { capitalizeFirstLetter } from 'lib/utils' import { useEffect, useState } from 'react' -import { HogFunctionInputSchemaType, HogFunctionInputType } from '~/types' +import { + HogFunctionConfigurationType, + HogFunctionInputSchemaType, + HogFunctionInputType, + HogFunctionMappingType, +} from '~/types' import { EmailTemplater } from './email-templater/EmailTemplater' import { hogFunctionConfigurationLogic } from './hogFunctionConfigurationLogic' @@ -35,7 +40,14 @@ export type HogFunctionInputProps = { disabled?: boolean } +export interface HogFunctionInputsProps { + configuration: HogFunctionConfigurationType | HogFunctionMappingType + setConfigurationValue: (key: string, value: any) => void +} + export type HogFunctionInputWithSchemaProps = { + configuration: HogFunctionConfigurationType | HogFunctionMappingType + setConfigurationValue: (key: string, value: any) => void schema: HogFunctionInputSchemaType } @@ -196,9 +208,15 @@ type HogFunctionInputSchemaControlsProps = { value: HogFunctionInputSchemaType onChange: (value: HogFunctionInputSchemaType | null) => void onDone: () => void + supportsSecrets: boolean } -function HogFunctionInputSchemaControls({ value, onChange, onDone }: HogFunctionInputSchemaControlsProps): JSX.Element { +function HogFunctionInputSchemaControls({ + value, + onChange, + onDone, + supportsSecrets, +}: HogFunctionInputSchemaControlsProps): JSX.Element { const _onChange = (data: Partial | null): void => { if (data?.key?.length === 0) { setLocalVariableError('Input variable name cannot be empty') @@ -230,13 +248,15 @@ function HogFunctionInputSchemaControls({ value, onChange, onDone }: HogFunction label="Required" bordered /> - _onChange({ secret })} - label="Secret" - bordered - /> + {supportsSecrets ? ( + _onChange({ secret })} + label="Secret" + bordered + /> + ) : null}
} size="small" onClick={() => onChange(null)} /> onDone()}> @@ -314,10 +334,13 @@ function HogFunctionInputSchemaControls({ value, onChange, onDone }: HogFunction ) } -export function HogFunctionInputWithSchema({ schema }: HogFunctionInputWithSchemaProps): JSX.Element { +export function HogFunctionInputWithSchema({ + schema, + configuration, + setConfigurationValue, +}: HogFunctionInputWithSchemaProps): JSX.Element { const { attributes, listeners, setNodeRef, transform, transition } = useSortable({ id: schema.key }) - const { showSource, configuration } = useValues(hogFunctionConfigurationLogic) - const { setConfigurationValue } = useActions(hogFunctionConfigurationLogic) + const { showSource } = useValues(hogFunctionConfigurationLogic) const [editing, setEditing] = useState(false) const value = configuration.inputs?.[schema.key] @@ -349,6 +372,7 @@ export function HogFunctionInputWithSchema({ schema }: HogFunctionInputWithSchem }, [showSource]) const supportsTemplating = ['string', 'json', 'dictionary', 'email'].includes(schema.type) + const supportsSecrets = 'type' in configuration // no secrets for mapping inputs return (
setEditing(false)} + supportsSecrets={supportsSecrets} />
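The `supportsSecrets` prop threaded through above comes from a structural check: a top-level hog function configuration has a `type` field while a per-mapping configuration does not, so `'type' in configuration` distinguishes the two. A reduced sketch, with the shapes simplified:

```ts
// Simplified stand-ins for HogFunctionConfigurationType / HogFunctionMappingType.
interface ConfigurationLike {
    type: string
    inputs_schema?: unknown[]
}
interface MappingLike {
    inputs_schema?: unknown[]
}

// Secret inputs are only offered at the top level; mapping inputs never get the
// "Secret" checkbox, which is why the control above is wrapped in `supportsSecrets`.
function supportsSecrets(configuration: ConfigurationLike | MappingLike): boolean {
    return 'type' in configuration
}
```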
)} @@ -451,11 +476,17 @@ export function HogFunctionInputWithSchema({ schema }: HogFunctionInputWithSchem ) } -export function HogFunctionInputs(): JSX.Element { - const { showSource, configuration } = useValues(hogFunctionConfigurationLogic) - const { setConfigurationValue } = useActions(hogFunctionConfigurationLogic) +export function HogFunctionInputs({ + configuration, + setConfigurationValue, +}: HogFunctionInputsProps): JSX.Element | null { + const { showSource } = useValues(hogFunctionConfigurationLogic) if (!configuration?.inputs_schema?.length) { + if (!('type' in configuration)) { + // If this is a mapping, don't show any error message. + return null + } return This function does not require any input variables. } @@ -477,7 +508,14 @@ export function HogFunctionInputs(): JSX.Element { > {configuration.inputs_schema?.map((schema) => { - return + return ( + + ) })} diff --git a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionTest.tsx b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionTest.tsx index db83344bcbcf14..1861b06f369ed8 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionTest.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionTest.tsx @@ -102,16 +102,14 @@ export function HogFunctionTest(props: HogFunctionTestLogicProps): JSX.Element {
) : ( <> - {type === 'destination' ? ( - - Refresh globals - - ) : null} + + Refresh globals + {({ value, onChange }) => ( +
{({ value, onChange }) => ( - - {({ value, onChange }) => ( - <> - onChange({ ...value, filter_test_accounts })} - fullWidth - /> - { - onChange({ - ...value, - properties, - }) - }} - pageKey={`HogFunctionPropertyFilters.${id}`} - /> +
+ + {({ value, onChange }) => { + const filters = (value ?? {}) as HogFunctionFiltersType + return ( + <> + onChange({ ...filters, filter_test_accounts })} + fullWidth + /> + { + onChange({ + ...filters, + properties, + }) + }} + pageKey={`HogFunctionPropertyFilters.${id}`} + /> - Match event and actions -

- If set, the destination will only run if the event matches any of the below. -

- { - onChange({ - ...value, - ...sanitizeActionFilters(payload), - }) - }} - typeKey="plugin-filters" - mathAvailability={MathAvailability.None} - hideRename - hideDuplicate - showNestedArrow={false} - actionsTaxonomicGroupTypes={[ - TaxonomicFilterGroupType.Events, - TaxonomicFilterGroupType.Actions, - ]} - propertiesTaxonomicGroupTypes={[ - TaxonomicFilterGroupType.EventProperties, - TaxonomicFilterGroupType.EventFeatureFlags, - TaxonomicFilterGroupType.Elements, - TaxonomicFilterGroupType.PersonProperties, - TaxonomicFilterGroupType.HogQLExpression, - ...groupsTaxonomicTypes, - ]} - propertyFiltersPopover - addFilterDefaultOptions={{ - id: '$pageview', - name: '$pageview', - type: EntityTypes.EVENTS, - }} - buttonCopy="Add event matcher" - /> - - )} -
+ {!useMapping ? ( + <> +
+ Match events and actions +
+

+ If set, this function will only run if the event matches any of the + below. +

+ { + onChange({ + ...value, + ...sanitizeActionFilters(payload), + }) + }} + typeKey="plugin-filters" + mathAvailability={MathAvailability.None} + hideRename + hideDuplicate + showNestedArrow={false} + actionsTaxonomicGroupTypes={[ + TaxonomicFilterGroupType.Events, + TaxonomicFilterGroupType.Actions, + ]} + propertiesTaxonomicGroupTypes={[ + TaxonomicFilterGroupType.EventProperties, + TaxonomicFilterGroupType.EventFeatureFlags, + TaxonomicFilterGroupType.Elements, + TaxonomicFilterGroupType.PersonProperties, + TaxonomicFilterGroupType.HogQLExpression, + ...groupsTaxonomicTypes, + ]} + propertyFiltersPopover + addFilterDefaultOptions={{ + id: '$pageview', + name: '$pageview', + type: EntityTypes.EVENTS, + }} + buttonCopy="Add event matcher" + /> + {showDropEvents && ( + <> + + + Drop events that don't match + onChange({ ...value, drop_events })} + /> + + + + {!value?.drop_events ? ( +

+ Currently, this will run for all events that match the above + conditions. Any that do not match will be unmodified and ingested as + they are. +

+ ) : ( + + This will drop all events that don't match the above conditions. + Please ensure this is definitely intended. + + )} + + )} + + ) : null} + + ) + }} + {showMasking ? ( {({ value, onChange }) => ( -
+
{configuration.masking?.hash ? ( <> -
+
of
-
+
or until = {} - - data.inputs_schema?.forEach((input) => { - const secret = data.inputs?.[input.key]?.secret - let value = data.inputs?.[input.key]?.value +export const TYPES_WITH_GLOBALS: HogFunctionTypeType[] = ['transformation', 'destination'] - if (secret) { - // If set this means we haven't changed the value - sanitizedInputs[input.key] = { - value: '********', // Don't send the actual value - secret: true, +export function sanitizeConfiguration(data: HogFunctionConfigurationType): HogFunctionConfigurationType { + function sanitizeInputs( + data: HogFunctionConfigurationType | HogFunctionMappingType + ): Record { + const sanitizedInputs: Record = {} + data.inputs_schema?.forEach((input) => { + const secret = data.inputs?.[input.key]?.secret + let value = data.inputs?.[input.key]?.value + + if (secret) { + // If set this means we haven't changed the value + sanitizedInputs[input.key] = { + value: '********', // Don't send the actual value + secret: true, + } + return } - return - } - if (input.type === 'json' && typeof value === 'string') { - try { - value = JSON.parse(value) - } catch (e) { - // Ignore + if (input.type === 'json' && typeof value === 'string') { + try { + value = JSON.parse(value) + } catch (e) { + // Ignore + } } - } - sanitizedInputs[input.key] = { - value: value, - } - }) + sanitizedInputs[input.key] = { + value: value, + } + }) + return sanitizedInputs + } const payload: HogFunctionConfigurationType = { ...data, filters: data.filters, - inputs: sanitizedInputs, + mappings: data.mappings?.map((mapping) => ({ + ...mapping, + inputs: sanitizeInputs(mapping), + })), + inputs: sanitizeInputs(data), masking: data.masking?.hash ? data.masking : null, icon_url: data.icon_url, } @@ -107,15 +120,32 @@ const templateToConfiguration = ( template: HogFunctionTemplateType, subTemplate?: HogFunctionSubTemplateType | null ): HogFunctionConfigurationType => { - const inputs: Record = {} + function getInputs( + inputs_schema?: HogFunctionInputSchemaType[] | null, + subTemplate?: HogFunctionSubTemplateType | null + ): Record { + const inputs: Record = {} + inputs_schema?.forEach((schema) => { + if (typeof subTemplate?.inputs?.[schema.key] !== 'undefined') { + inputs[schema.key] = { value: subTemplate.inputs[schema.key] } + } else if (schema.default !== undefined) { + inputs[schema.key] = { value: schema.default } + } + }) + return inputs + } - template.inputs_schema?.forEach((schema) => { - if (typeof subTemplate?.inputs?.[schema.key] !== 'undefined') { - inputs[schema.key] = { value: subTemplate.inputs[schema.key] } - } else if (schema.default !== undefined) { - inputs[schema.key] = { value: schema.default } - } - }) + function getMappingInputs( + inputs_schema?: HogFunctionInputSchemaType[] | null + ): Record { + const inputs: Record = {} + inputs_schema?.forEach((schema) => { + if (schema.default !== undefined) { + inputs[schema.key] = { value: schema.default } + } + }) + return inputs + } return { type: template.type ?? 'destination', @@ -123,9 +153,15 @@ const templateToConfiguration = ( description: subTemplate?.name ?? template.description, inputs_schema: template.inputs_schema, filters: subTemplate?.filters ?? template.filters, + mappings: (subTemplate?.mappings ?? 
template.mappings)?.map( + (mapping): HogFunctionMappingType => ({ + ...mapping, + inputs: getMappingInputs(mapping.inputs_schema), + }) + ), hog: template.hog, icon_url: template.icon_url, - inputs, + inputs: getInputs(template.inputs_schema, subTemplate), enabled: template.type !== 'broadcast', } } @@ -139,7 +175,6 @@ export function convertToHogFunctionInvocationGlobals( return { project: { id: team?.id ?? 0, - name: team?.name ?? 'Default project', url: projectUrl, }, @@ -299,7 +334,7 @@ export const hogFunctionConfigurationLogic = kea { - if (values.type !== 'destination') { + if (values.type !== 'destination' && values.type !== 'site_destination') { return null } if (values.sparkline === null) { @@ -363,7 +398,7 @@ export const hogFunctionConfigurationLogic = kea { - if (!values.lastEventQuery || values.type !== 'destination') { + if (!values.lastEventQuery) { return values.sampleGlobals } const errorMessage = @@ -419,14 +454,17 @@ export const hogFunctionConfigurationLogic = kea { return { name: !data.name ? 'Name is required' : undefined, + mappings: + data.type === 'site_destination' && (!data.mappings || data.mappings.length === 0) + ? 'You must add at least one mapping' + : undefined, ...(values.inputFormErrors as any), } }, submit: async (data) => { - const payload = sanitizeConfiguration(data) - + const payload: Record = sanitizeConfiguration(data) // Only sent on create - ;(payload as any).template_id = props.templateId || values.hogFunction?.template?.id + payload.template_id = props.templateId || values.hogFunction?.template?.id if (!values.hasAddon) { // Remove the source field if the user doesn't have the addon @@ -434,7 +472,7 @@ export const hogFunctionConfigurationLogic = kea [s.hogFunction, s.template], + (hogFunction, template) => (hogFunction ?? template)?.type === 'site_destination', + ], defaultFormState: [ (s) => [s.template, s.hogFunction, s.subTemplate], (template, hogFunction, subTemplate): HogFunctionConfigurationType | null => { @@ -604,8 +646,26 @@ export const hogFunctionConfigurationLogic = kea [s.configuration], - (configuration): PropertyGroupFilter => { + (s) => [s.configuration, s.useMapping], + (configuration, useMapping): PropertyGroupFilter => { + // We're using mappings, but none are provided, so match zero events. 
+ if (useMapping && !configuration.mappings?.length) { + return { + type: FilterLogicalOperator.And, + values: [ + { + type: FilterLogicalOperator.And, + values: [ + { + type: PropertyFilterType.HogQL, + key: 'false', + }, + ], + }, + ], + } + } + const seriesProperties: PropertyGroupFilterValue = { type: FilterLogicalOperator.Or, values: [], @@ -614,7 +674,21 @@ export const hogFunctionConfigurationLogic = kea [s.configuration, s.matchingFilters, s.type], (configuration, matchingFilters, type): TrendsQuery | null => { - if (type !== 'destination') { + if (type !== 'destination' && type !== 'site_destination') { return null } return { @@ -728,7 +802,7 @@ export const hogFunctionConfigurationLogic = kea [s.configuration, s.matchingFilters, s.groupTypes, s.type], (configuration, matchingFilters, groupTypes, type): EventsQuery | null => { - if (type !== 'destination') { + if (!TYPES_WITH_GLOBALS.includes(type)) { return null } const query: EventsQuery = { @@ -760,7 +834,6 @@ export const hogFunctionConfigurationLogic = kea [s.template, s.subTemplateId], (template, subTemplateId) => { @@ -772,8 +845,11 @@ export const hogFunctionConfigurationLogic = kea [router.selectors.searchParams], ({ sub_template }) => !!sub_template], + mappingTemplates: [ + (s) => [s.hogFunction, s.template], + (hogFunction, template) => template?.mapping_templates ?? hogFunction?.template?.mapping_templates ?? [], + ], })), listeners(({ actions, values, cache }) => ({ @@ -816,6 +892,20 @@ export const hogFunctionConfigurationLogic = kea t.include_by_default) + .map((template) => ({ + ...template, + inputs: template.inputs_schema?.reduce((acc, input) => { + acc[input.key] = { value: input.default } + return acc + }, {} as Record), + })), + ] + } const paramsFromUrl = cache.paramsFromUrl ?? {} const unsavedConfigurationToApply = (values.unsavedConfiguration?.timestamp ?? 
0) > Date.now() - UNSAVED_CONFIGURATION_TTL diff --git a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionTestLogic.tsx b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionTestLogic.tsx index 9caa8bc369165f..7a06989b5b6854 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionTestLogic.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionTestLogic.tsx @@ -69,9 +69,7 @@ export const hogFunctionTestLogic = kea([ }), listeners(({ values, actions }) => ({ loadSampleGlobalsSuccess: () => { - if (values.type === 'destination') { - actions.setTestInvocationValue('globals', JSON.stringify(values.sampleGlobals, null, 2)) - } + actions.setTestInvocationValue('globals', JSON.stringify(values.sampleGlobals, null, 2)) }, })), forms(({ props, actions, values }) => ({ diff --git a/frontend/src/scenes/pipeline/hogfunctions/integrations/HogFunctionInputIntegration.tsx b/frontend/src/scenes/pipeline/hogfunctions/integrations/HogFunctionInputIntegration.tsx index f92b2f9123deb7..e73b679afcd401 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/integrations/HogFunctionInputIntegration.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/integrations/HogFunctionInputIntegration.tsx @@ -16,6 +16,7 @@ export function HogFunctionInputIntegration({ schema, ...props }: HogFunctionInp <> persistForUnload()} diff --git a/frontend/src/scenes/pipeline/hogfunctions/integrations/IntegrationChoice.tsx b/frontend/src/scenes/pipeline/hogfunctions/integrations/IntegrationChoice.tsx index cee61f7c80c88d..334c17ee3d859e 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/integrations/IntegrationChoice.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/integrations/IntegrationChoice.tsx @@ -7,10 +7,13 @@ import { IntegrationView } from 'lib/integrations/IntegrationView' import { capitalizeFirstLetter } from 'lib/utils' import { urls } from 'scenes/urls' +import { HogFunctionInputSchemaType } from '~/types' + export type IntegrationConfigureProps = { value?: number onChange?: (value: number | null) => void redirectUrl?: string + schema?: HogFunctionInputSchemaType integration?: string beforeRedirect?: () => void } @@ -18,6 +21,7 @@ export type IntegrationConfigureProps = { export function IntegrationChoice({ onChange, value, + schema, integration, redirectUrl, beforeRedirect, @@ -124,5 +128,13 @@ export function IntegrationChoice({ ) - return <>{integrationKind ? : button} + return ( + <> + {integrationKind ? 
( + + ) : ( + button + )} + + ) } diff --git a/frontend/src/scenes/pipeline/hogfunctions/mapping/HogFunctionMapping.tsx b/frontend/src/scenes/pipeline/hogfunctions/mapping/HogFunctionMapping.tsx new file mode 100644 index 00000000000000..0cd0a86a5e5ae5 --- /dev/null +++ b/frontend/src/scenes/pipeline/hogfunctions/mapping/HogFunctionMapping.tsx @@ -0,0 +1,195 @@ +import { IconPlus, IconPlusSmall, IconTrash } from '@posthog/icons' +import { LemonButton, LemonLabel, LemonSelect } from '@posthog/lemon-ui' +import { useValues } from 'kea' +import { Group } from 'kea-forms' +import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' +import { LemonField } from 'lib/lemon-ui/LemonField' +import { getDefaultEventName } from 'lib/utils/getAppContext' +import { useState } from 'react' +import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' +import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' + +import { groupsModel } from '~/models/groupsModel' +import { EntityTypes, FilterType, HogFunctionConfigurationType, HogFunctionMappingType } from '~/types' + +import { hogFunctionConfigurationLogic } from '../hogFunctionConfigurationLogic' +import { HogFunctionInputs } from '../HogFunctionInputs' + +export function HogFunctionMapping(): JSX.Element | null { + const { groupsTaxonomicTypes } = useValues(groupsModel) + const { useMapping, showSource, mappingTemplates } = useValues(hogFunctionConfigurationLogic) + const [selectedMappingTemplate, setSelectedMappingTemplate] = useState(null) + + if (!useMapping) { + return null + } + + return ( + + {({ value, onChange }) => { + const mappings = (value ?? []) as HogFunctionMappingType[] + return ( + <> + {mappings.map((mapping, index) => ( +
+
+ + Mapping #{index + 1} + + } + title="Delete mapping" + data-attr={`delete-prop-filter-${index}`} + noPadding + onClick={() => onChange(mappings.filter((_, i) => i !== index))} + />
+ + onChange(mappings.map((m, i) => (i === index ? { ...m, filters: f } : m))) + } + typeKey={`match-group-${index}`} + mathAvailability={MathAvailability.None} + hideRename + hideDuplicate + showNestedArrow={false} + actionsTaxonomicGroupTypes={[ + TaxonomicFilterGroupType.Events, + TaxonomicFilterGroupType.Actions, + ]} + propertiesTaxonomicGroupTypes={[ + TaxonomicFilterGroupType.EventProperties, + TaxonomicFilterGroupType.EventFeatureFlags, + TaxonomicFilterGroupType.Elements, + TaxonomicFilterGroupType.PersonProperties, + TaxonomicFilterGroupType.HogQLExpression, + ...groupsTaxonomicTypes, + ]} + propertyFiltersPopover + addFilterDefaultOptions={{ + id: '$pageview', + name: '$pageview', + type: EntityTypes.EVENTS, + }} + buttonCopy="Add event matcher" + /> + + { + onChange(mappings.map((m, i) => (i === index ? { ...m, [key]: value } : m))) + }} + /> + + {showSource ? ( + } + size="small" + type="secondary" + className="my-4" + onClick={() => { + onChange( + mappings.map((m, i) => { + if (i !== index) { + return m + } + const inputs_schema = m.inputs_schema ?? [] + return { + ...m, + inputs_schema: [ + ...inputs_schema, + { + type: 'string', + key: `var_${inputs_schema.length + 1}`, + label: '', + required: false, + }, + ], + } + }) + ) + }} + > + Add input variable + + ) : null} +
+ ))} +
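The mapping rows above repeatedly use the same immutable array-update idiom: `map` with an index check for edits, `filter` for deletes. Extracted as generic helpers, the pattern looks like this (a sketch only; the component inlines it rather than using helpers):

```ts
// Replace the item at `index` with a patched copy, leaving other items untouched.
function updateAt<T>(items: T[], index: number, patch: Partial<T>): T[] {
    return items.map((item, i) => (i === index ? { ...item, ...patch } : item))
}

// Remove the item at `index` without mutating the original array.
function removeAt<T>(items: T[], index: number): T[] {
    return items.filter((_, i) => i !== index)
}

// Usage, mirroring the onChange calls above:
// onChange(updateAt(mappings, index, { filters: f }))
// onChange(removeAt(mappings, index))
```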
+ New mapping +
+ {mappingTemplates.length ? ( + ({ + label: t.name, + value: t.name, + }))} + /> + ) : null} + } + disabledReason={ + mappingTemplates.length && !selectedMappingTemplate + ? 'Select a mapping template' + : undefined + } + onClick={() => { + if (selectedMappingTemplate) { + const mappingTemplate = mappingTemplates.find( + (t) => t.name === selectedMappingTemplate + ) + if (mappingTemplate) { + const { name, ...mapping } = mappingTemplate + const inputs = mapping.inputs_schema + ? Object.fromEntries( + mapping.inputs_schema + .filter((m) => m.default !== undefined) + .map((m) => [ + m.key, + { value: structuredClone(m.default) }, + ]) + ) + : {} + onChange([...mappings, { ...mapping, inputs }]) + } + setSelectedMappingTemplate(null) + return + } + + const newMapping = { + inputs_schema: [], + inputs: {}, + filters: { + events: [ + { + id: getDefaultEventName(), + name: getDefaultEventName(), + type: EntityTypes.EVENTS, + order: 0, + properties: [], + }, + ], + actions: [], + }, + } + onChange([...mappings, newMapping]) + }} + > + Add mapping + +
+
+ + ) + }} +
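The "Add mapping" handler above seeds a new mapping's inputs from each schema entry's default. As a standalone sketch of that seeding step (types reduced to just the fields used):

```ts
interface InputSchemaLike {
    key: string
    default?: unknown
}

// Build the initial `inputs` record from schema defaults, cloning each default so a
// later edit to one mapping can't mutate the shared template object.
function seedInputsFromSchema(schema: InputSchemaLike[] | undefined): Record<string, { value: unknown }> {
    return Object.fromEntries(
        (schema ?? [])
            .filter((s) => s.default !== undefined)
            .map((s) => [s.key, { value: structuredClone(s.default) }])
    )
}
```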
+ ) +} diff --git a/frontend/src/scenes/pipeline/hogfunctions/urls.ts b/frontend/src/scenes/pipeline/hogfunctions/urls.ts index a26ce4a331d558..fce68f1b7a82b7 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/urls.ts +++ b/frontend/src/scenes/pipeline/hogfunctions/urls.ts @@ -26,7 +26,9 @@ export function hogFunctionUrl(type: HogFunctionTypeType | PipelineStage, id?: s } // Supports both hog function types and pipeline stages themselves as input -export function hogFunctionTypeToPipelineStage(type: string): PipelineStage { +export function hogFunctionTypeToPipelineStage( + type: string +): PipelineStage.Destination | PipelineStage.Transformation | PipelineStage.SiteApp { switch (type) { case 'site_destination': return PipelineStage.Destination @@ -38,6 +40,8 @@ export function hogFunctionTypeToPipelineStage(type: string): PipelineStage { return PipelineStage.SiteApp case 'site-app': return PipelineStage.SiteApp + case 'transformation': + return PipelineStage.Transformation default: return PipelineStage.Destination } diff --git a/frontend/src/scenes/pipeline/pipelineAccessLogic.tsx b/frontend/src/scenes/pipeline/pipelineAccessLogic.tsx index 1d8875dbbedfa1..41a3af9ee5aaed 100644 --- a/frontend/src/scenes/pipeline/pipelineAccessLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineAccessLogic.tsx @@ -5,7 +5,7 @@ import { AvailableFeature } from '~/types' import { canConfigurePlugins, canGloballyManagePlugins } from './access' import type { pipelineAccessLogicType } from './pipelineAccessLogicType' -import { Destination, NewDestinationItemType, PipelineBackend, SiteApp } from './types' +import { Destination, NewDestinationItemType, PipelineBackend, SiteApp, Transformation } from './types' export const pipelineAccessLogic = kea([ path(['scenes', 'pipeline', 'pipelineAccessLogic']), @@ -25,8 +25,10 @@ export const pipelineAccessLogic = kea([ canEnableDestination: [ (s) => [s.canEnableNewDestinations], - (canEnableNewDestinations): ((destination: Destination | NewDestinationItemType | SiteApp) => boolean) => { - return (destination: Destination | NewDestinationItemType | SiteApp) => { + ( + canEnableNewDestinations + ): ((destination: Destination | NewDestinationItemType | SiteApp | Transformation) => boolean) => { + return (destination: Destination | NewDestinationItemType | SiteApp | Transformation) => { return destination.backend === PipelineBackend.HogFunction ? ('hog_function' in destination ? destination.hog_function.type === 'site_destination' || diff --git a/frontend/src/scenes/pipeline/types.ts b/frontend/src/scenes/pipeline/types.ts index b9621ad253cca1..3c1c69cf0f318a 100644 --- a/frontend/src/scenes/pipeline/types.ts +++ b/frontend/src/scenes/pipeline/types.ts @@ -124,18 +124,14 @@ export function convertToPipelineNode( ? Source : never { let node: PipelineNode + // check if type is a hog function if ('hog' in candidate) { node = { stage: stage as PipelineStage.Destination, backend: PipelineBackend.HogFunction, interval: 'realtime', - id: - candidate.type === 'destination' || - candidate.type === 'site_destination' || - candidate.type === 'site_app' - ? 
`hog-${candidate.id}` - : candidate.id, + id: `hog-${candidate.id}`, name: candidate.name, description: candidate.description, enabled: candidate.enabled, diff --git a/frontend/src/scenes/project-homepage/WatchNextPanel.tsx b/frontend/src/scenes/project-homepage/WatchNextPanel.tsx index 1f7917849e867e..4b5714bf935452 100644 --- a/frontend/src/scenes/project-homepage/WatchNextPanel.tsx +++ b/frontend/src/scenes/project-homepage/WatchNextPanel.tsx @@ -69,7 +69,6 @@ export function RecordingRow({ recording }: RecordingRowProps): JSX.Element { onClick={() => { openSessionPlayer({ id: recording.id, - matching_events: recording.matching_events, }) reportRecordingOpenedFromRecentRecordingList() }} diff --git a/frontend/src/scenes/session-recordings/SessionRecordings.tsx b/frontend/src/scenes/session-recordings/SessionRecordings.tsx index db612745ab2a09..d06f0a27c5cbd4 100644 --- a/frontend/src/scenes/session-recordings/SessionRecordings.tsx +++ b/frontend/src/scenes/session-recordings/SessionRecordings.tsx @@ -10,10 +10,12 @@ import { import { PageHeader } from 'lib/components/PageHeader' import { upgradeModalLogic } from 'lib/components/UpgradeModal/upgradeModalLogic' import { VersionCheckerBanner } from 'lib/components/VersionChecker/VersionCheckerBanner' +import { FEATURE_FLAGS } from 'lib/constants' import { useAsyncHandler } from 'lib/hooks/useAsyncHandler' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonTabs } from 'lib/lemon-ui/LemonTabs' import { Spinner } from 'lib/lemon-ui/Spinner/Spinner' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' import { SceneExport } from 'scenes/sceneTypes' @@ -140,7 +142,10 @@ function Warnings(): JSX.Element { type: AuthorizedUrlListType.RECORDING_DOMAINS, }) const { suggestions, authorizedUrls } = useValues(theAuthorizedUrlsLogic) + const { featureFlags } = useValues(featureFlagLogic) + const mightBeRefusingRecordings = suggestions.length > 0 && authorizedUrls.length > 0 + const settingLevel = featureFlags[FEATURE_FLAGS.ENVIRONMENTS] ? 'environment' : 'project' return ( <> @@ -156,7 +161,7 @@ function Warnings(): JSX.Element { children: 'Configure', }} > - Session recordings are currently disabled for this project. + Session recordings are currently disabled for this {settingLevel}. ) : null} diff --git a/frontend/src/scenes/session-recordings/components/OverviewGrid.tsx b/frontend/src/scenes/session-recordings/components/OverviewGrid.tsx index 2a1ca4749c44a1..5dd92344b995d4 100644 --- a/frontend/src/scenes/session-recordings/components/OverviewGrid.tsx +++ b/frontend/src/scenes/session-recordings/components/OverviewGrid.tsx @@ -1,6 +1,26 @@ import { Tooltip } from '@posthog/lemon-ui' +import { ReactNode } from 'react' -export function OverviewGrid({ children }: { children: React.ReactNode }): JSX.Element { +interface OverviewItemBase { + icon?: ReactNode + label: string + tooltipTitle?: string +} + +type TextOverviewItem = OverviewItemBase & { + type: 'text' + value: ReactNode +} + +type PropertyOverviewItem = OverviewItemBase & { + type: 'property' + property: string + value?: string | undefined +} + +export type OverviewItem = TextOverviewItem | PropertyOverviewItem + +export function OverviewGrid({ children }: { children: ReactNode }): JSX.Element { return (
@@ -14,16 +34,20 @@ export function OverviewGridItem({ children, description, label, + icon, }: { - children: React.ReactNode - description: React.ReactNode - label: React.ReactNode + children?: ReactNode + description: ReactNode + label: ReactNode + icon?: ReactNode }): JSX.Element { return (
-
{label}
-
{children}
+
+ {icon} {label} +
+
{children}
) diff --git a/frontend/src/scenes/session-recordings/components/SimpleTimeLabel.tsx b/frontend/src/scenes/session-recordings/components/SimpleTimeLabel.tsx index 20c280ae12fd4d..60d88a9aa4a91a 100644 --- a/frontend/src/scenes/session-recordings/components/SimpleTimeLabel.tsx +++ b/frontend/src/scenes/session-recordings/components/SimpleTimeLabel.tsx @@ -1,3 +1,4 @@ +import clsx from 'clsx' import { Dayjs, dayjs } from 'lib/dayjs' import { shortTimeZone } from 'lib/utils' @@ -9,14 +10,31 @@ function formatStringFor(d: Dayjs): string { return 'DD/MM/YYYY HH:mm:ss' } -export function SimpleTimeLabel({ startTime, isUTC }: { startTime: string | number; isUTC: boolean }): JSX.Element { +export function SimpleTimeLabel({ + startTime, + isUTC, + muted = true, + size = 'xsmall', +}: { + startTime: string | number | Dayjs + isUTC: boolean + muted?: boolean + size?: 'small' | 'xsmall' +}): JSX.Element { let d = dayjs(startTime) if (isUTC) { d = d.tz('UTC') } return ( -
+
{d.format(formatStringFor(d))} {isUTC ? 'UTC' : shortTimeZone(undefined, dayjs(d).toDate())}
) diff --git a/frontend/src/scenes/session-recordings/detail/SessionRecordingDetail.tsx b/frontend/src/scenes/session-recordings/detail/SessionRecordingDetail.tsx index 33bbdfb3e4acb3..0c7547badc5189 100644 --- a/frontend/src/scenes/session-recordings/detail/SessionRecordingDetail.tsx +++ b/frontend/src/scenes/session-recordings/detail/SessionRecordingDetail.tsx @@ -2,8 +2,10 @@ import './SessionRecordingScene.scss' import { useValues } from 'kea' import { PageHeader } from 'lib/components/PageHeader' +import { FEATURE_FLAGS } from 'lib/constants' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { Link } from 'lib/lemon-ui/Link' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { SceneExport } from 'scenes/sceneTypes' import { sessionRecordingDetailLogic, @@ -24,14 +26,19 @@ export const scene: SceneExport = { export function SessionRecordingDetail({ id }: SessionRecordingDetailLogicProps = {}): JSX.Element { const { currentTeam } = useValues(teamLogic) + const { featureFlags } = useValues(featureFlagLogic) + + const settingLevel = featureFlags[FEATURE_FLAGS.ENVIRONMENTS] ? 'environment' : 'project' + return (
{currentTeam && !currentTeam?.session_recording_opt_in ? (
- Session recordings are currently disabled for this project. To use this feature, please go to - your project settings and enable it. + Session recordings are currently disabled for this {settingLevel}. To use this feature, please + go to your project settings and + enable it.
) : null} diff --git a/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx b/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx index 919fa8b6bd0427..0498fb9efe05ff 100644 --- a/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx +++ b/frontend/src/scenes/session-recordings/player/controller/PlayerController.tsx @@ -4,6 +4,7 @@ import { useActions, useValues } from 'kea' import { useKeyboardHotkeys } from 'lib/hooks/useKeyboardHotkeys' import { IconFullScreen, IconSync } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' +import { humanFriendlyDuration } from 'lib/utils' import { SettingsBar, SettingsButton, @@ -25,13 +26,18 @@ import { SeekSkip, Timestamp } from './PlayerControllerTime' import { Seekbar } from './Seekbar' function SetPlaybackSpeed(): JSX.Element { - const { speed } = useValues(sessionRecordingPlayerLogic) + const { speed, sessionPlayerData } = useValues(sessionRecordingPlayerLogic) const { setSpeed } = useActions(sessionRecordingPlayerLogic) return ( ({ - label: `${speedToggle}x`, + label: ( +
+ {speedToggle}x + ({humanFriendlyDuration(sessionPlayerData.durationMs / speedToggle / 1000)}) +
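A quick sanity check on the playback-speed labels above (the rendered output of `humanFriendlyDuration` is assumed): each menu entry shows how long the recording takes to watch at that speed.

```ts
// durationMs / speedToggle / 1000 yields seconds of wall-clock playback time.
const durationMs = 30 * 60 * 1000 // a 30-minute recording
console.assert(durationMs / 2 / 1000 === 900) // at 2x: 900 s, i.e. roughly "15m"
console.assert(durationMs / 0.5 / 1000 === 3600) // at 0.5x: 3600 s, i.e. roughly "1h"
```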
+ ), onClick: () => setSpeed(speedToggle), active: speed === speedToggle && speedToggle !== 1, status: speed === speedToggle ? 'danger' : 'default', diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorBottomSettings.tsx b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorBottomSettings.tsx index d5341305dd2c83..b6287ef3fdb30e 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorBottomSettings.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorBottomSettings.tsx @@ -1,8 +1,9 @@ import './PlayerInspectorList.scss' +import { BaseIcon, IconCheck } from '@posthog/icons' import { useActions, useValues } from 'kea' import { userPreferencesLogic } from 'lib/logic/userPreferencesLogic' -import { SettingsBar, SettingsToggle } from 'scenes/session-recordings/components/PanelSettings' +import { SettingsBar, SettingsMenu, SettingsToggle } from 'scenes/session-recordings/components/PanelSettings' import { miniFiltersLogic } from 'scenes/session-recordings/player/inspector/miniFiltersLogic' import { FilterableInspectorListItemTypes } from '~/types' @@ -10,32 +11,39 @@ import { FilterableInspectorListItemTypes } from '~/types' import { sessionRecordingPlayerLogic } from '../sessionRecordingPlayerLogic' import { playerInspectorLogic } from './playerInspectorLogic' -function HideProperties(): JSX.Element | null { +function HideProperties(): JSX.Element { const { logicProps } = useValues(sessionRecordingPlayerLogic) const inspectorLogic = playerInspectorLogic(logicProps) - const { allItemsByItemType } = useValues(inspectorLogic) + const { hasEventsToDisplay } = useValues(inspectorLogic) + const { hasEventsFiltersSelected } = useValues(miniFiltersLogic) + + const { hideNullValues } = useValues(userPreferencesLogic) + const { setHideNullValues } = useActions(userPreferencesLogic) - const { miniFiltersForType } = useValues(miniFiltersLogic) const { hidePostHogPropertiesInTable } = useValues(userPreferencesLogic) const { setHidePostHogPropertiesInTable } = useActions(userPreferencesLogic) - const hasEventsFiltersSelected = miniFiltersForType(FilterableInspectorListItemTypes.EVENTS).some((x) => x.enabled) - const hasEventsToDisplay = allItemsByItemType[FilterableInspectorListItemTypes.EVENTS]?.length > 0 - return ( - setHidePostHogPropertiesInTable(!hidePostHogPropertiesInTable)} - disabledReason={ - hasEventsToDisplay && hasEventsFiltersSelected ? undefined : 'There are no events in the list' - } - active={hidePostHogPropertiesInTable} + {hidePostHogPropertiesInTable ? : } Hide PostHog properties, + onClick: () => setHidePostHogPropertiesInTable(!hidePostHogPropertiesInTable), + active: hidePostHogPropertiesInTable, + disabledReason: + hasEventsToDisplay && hasEventsFiltersSelected ? undefined : 'There are no events in the list', + }, + { + label: <>{hideNullValues ? : } Hide null values, + onClick: () => setHideNullValues(!hideNullValues), + active: hideNullValues, + disabledReason: + hasEventsToDisplay && hasEventsFiltersSelected ? 
undefined : 'There are no events in the list', + }, + ]} + label="Hide properties" + highlightWhenActive={false} /> ) } diff --git a/frontend/src/scenes/session-recordings/player/inspector/miniFiltersLogic.ts b/frontend/src/scenes/session-recordings/player/inspector/miniFiltersLogic.ts index 357e0eb4a12f64..27953c893b9cba 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/miniFiltersLogic.ts +++ b/frontend/src/scenes/session-recordings/player/inspector/miniFiltersLogic.ts @@ -176,6 +176,11 @@ export const miniFiltersLogic = kea([ }, ], + hasEventsFiltersSelected: [ + (s) => [s.miniFiltersForType], + (miniFiltersForType) => miniFiltersForType(FilterableInspectorListItemTypes.EVENTS).some((x) => x.enabled), + ], + miniFilters: [ (s) => [s.selectedMiniFilters], (selectedMiniFilters): SharedListMiniFilter[] => { diff --git a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts index f3d380dbc14f7c..5fe1914f28852e 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts +++ b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.ts @@ -4,7 +4,9 @@ import { actions, connect, events, kea, key, listeners, path, props, propsChange import { loaders } from 'kea-loaders' import api from 'lib/api' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' +import { FEATURE_FLAGS } from 'lib/constants' import { Dayjs, dayjs } from 'lib/dayjs' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { getCoreFilterDefinition } from 'lib/taxonomy' import { eventToDescription, humanizeBytes, objectsEqual, toParams } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' @@ -22,6 +24,7 @@ import { MatchingEventsMatchType, } from 'scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic' +import { RecordingsQuery } from '~/queries/schema' import { FilterableInspectorListItemTypes, MatchedRecordingEvent, @@ -248,6 +251,8 @@ export const playerInspectorLogic = kea([ ['allPerformanceEvents'], sessionRecordingDataLogic(props), ['trackedWindow'], + featureFlagLogic, + ['featureFlags'], ], })), actions(() => ({ @@ -275,7 +280,7 @@ export const playerInspectorLogic = kea([ }, ], })), - loaders(({ props }) => ({ + loaders(({ props, values }) => ({ matchingEventUUIDs: [ [] as MatchedRecordingEvent[] | null, { @@ -297,17 +302,29 @@ export const playerInspectorLogic = kea([ if (!filters) { throw new Error('Backend matching events type must include its filters') } - const params = toParams({ + // as_query is a temporary parameter as a flag + // to let the backend know not to convert the query to a legacy filter when processing + const params: RecordingsQuery & { as_query?: boolean } = { ...convertUniversalFiltersToRecordingsQuery(filters), session_ids: [props.sessionRecordingId], - }) - const response = await api.recordings.getMatchingEvents(params) + } + if (values.listAPIAsQuery) { + params.as_query = true + } + const response = await api.recordings.getMatchingEvents(toParams(params)) return response.results.map((x) => ({ uuid: x } as MatchedRecordingEvent)) }, }, ], })), selectors(({ props }) => ({ + listAPIAsQuery: [ + (s) => [s.featureFlags], + (featureFlags) => { + return !!featureFlags[FEATURE_FLAGS.REPLAY_LIST_RECORDINGS_AS_QUERY] + }, + ], + allowMatchingEventsFilter: [ (s) => [s.miniFilters], (miniFilters): boolean => { @@ -984,6 +1001,11 @@ export const 
playerInspectorLogic = kea([ return itemsByType }, ], + + hasEventsToDisplay: [ + (s) => [s.allItemsByItemType], + (allItemsByItemType): boolean => allItemsByItemType[FilterableInspectorListItemTypes.EVENTS]?.length > 0, + ], })), listeners(({ values, actions }) => ({ setItemExpanded: ({ index, expanded }) => { diff --git a/frontend/src/scenes/session-recordings/player/playerMetaLogic.ts b/frontend/src/scenes/session-recordings/player/playerMetaLogic.tsx similarity index 84% rename from frontend/src/scenes/session-recordings/player/playerMetaLogic.ts rename to frontend/src/scenes/session-recordings/player/playerMetaLogic.tsx index 262178b32c0088..9d2267922cc2dc 100644 --- a/frontend/src/scenes/session-recordings/player/playerMetaLogic.ts +++ b/frontend/src/scenes/session-recordings/player/playerMetaLogic.tsx @@ -1,3 +1,4 @@ +import { IconCursorClick, IconKeyboard, IconWarning } from '@posthog/icons' import { eventWithTime } from '@rrweb/types' import { actions, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' @@ -8,6 +9,7 @@ import { getCoreFilterDefinition } from 'lib/taxonomy' import { ceilMsToClosestSecond, findLastIndex, objectsEqual } from 'lib/utils' import posthog from 'posthog-js' import { countryCodeToName } from 'scenes/insights/views/WorldMap' +import { OverviewItem } from 'scenes/session-recordings/components/OverviewGrid' import { sessionRecordingDataLogic } from 'scenes/session-recordings/player/sessionRecordingDataLogic' import { sessionRecordingPlayerLogic, @@ -16,17 +18,10 @@ import { import { PersonType } from '~/types' +import { SimpleTimeLabel } from '../components/SimpleTimeLabel' import { sessionRecordingsListPropertiesLogic } from '../playlist/sessionRecordingsListPropertiesLogic' import type { playerMetaLogicType } from './playerMetaLogicType' -export interface OverviewItem { - property: string - label: string - value: string - type: 'text' | 'icon' - tooltipTitle?: string -} - const browserPropertyKeys = ['$geoip_country_code', '$browser', '$device_type', '$os'] const mobilePropertyKeys = ['$geoip_country_code', '$device_type', '$os_name'] const recordingPropertyKeys = ['click_count', 'keypress_count', 'console_error_count'] as const @@ -133,6 +128,14 @@ export const playerMetaLogic = kea([ return sessionPlayerData.start ?? null }, ], + + endTime: [ + (s) => [s.sessionPlayerData], + (sessionPlayerData) => { + return sessionPlayerData.end ?? null + }, + ], + currentWindowIndex: [ (s) => [s.windowIds, s.currentSegment], (windowIds, currentSegment) => { @@ -186,19 +189,39 @@ export const playerMetaLogic = kea([ }, ], overviewItems: [ - (s) => [s.sessionPlayerMetaData], - (sessionPlayerMetaData) => { + (s) => [s.sessionPlayerMetaData, s.startTime, s.endTime], + (sessionPlayerMetaData, startTime, endTime) => { const items: OverviewItem[] = [] + if (startTime) { + items.push({ + label: 'Session start', + value: , + type: 'text', + }) + } + if (endTime) { + items.push({ + label: 'Session end', + value: , + type: 'text', + }) + } recordingPropertyKeys.forEach((property) => { if (sessionPlayerMetaData?.[property]) { items.push({ - label: `${sessionPlayerMetaData[property]} ${ - getCoreFilterDefinition(property, TaxonomicFilterGroupType.Replay)?.label ?? property - }`, - value: '', + icon: + property === 'click_count' ? ( + + ) : property === 'keypress_count' ? ( + + ) : property === 'console_error_count' ? 
( + + ) : undefined, + label: + getCoreFilterDefinition(property, TaxonomicFilterGroupType.Replay)?.label ?? property, + value: `${sessionPlayerMetaData[property]}`, type: 'text', - property, }) } }) @@ -223,7 +246,7 @@ export const playerMetaLogic = kea([ property, value, tooltipTitle, - type: 'icon', + type: 'property', property, }) } diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts index ffd4deb36e2c4f..cfda001ed9ea59 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts @@ -528,7 +528,7 @@ export const sessionRecordingPlayerLogic = kea( } if (sessionPlayerData.segments.length) { for (const segment of sessionPlayerData.segments) { - if (segment.startTimestamp <= timestamp && segment.endTimestamp >= timestamp) { + if (segment.startTimestamp <= timestamp && timestamp <= segment.endTimestamp) { return segment } } @@ -864,22 +864,28 @@ export const sessionRecordingPlayerLogic = kea( actions.setCurrentSegment(segment) } - if (!values.snapshotsLoaded) { - // We haven't started properly loading, or we're still polling so nothing to do - } else if (!values.isRealtimePolling && !values.snapshotsLoading && segment?.kind === 'buffer') { - // If not currently loading anything, - // and part of the recording hasn't loaded, set error state - values.player?.replayer?.pause() - actions.endBuffer() - console.error("Error: Player tried to seek to a position that hasn't loaded yet") - actions.setErrorPlayerState(true) - } - // If next time is greater than last buffered time, set to buffering else if (segment?.kind === 'buffer') { - values.player?.replayer?.pause() - actions.startBuffer() - actions.setErrorPlayerState(false) + const isStillLoading = values.isRealtimePolling || values.snapshotsLoading + const isPastEnd = values.sessionPlayerData.end && timestamp > values.sessionPlayerData.end.valueOf() + if (isStillLoading) { + values.player?.replayer?.pause() + actions.startBuffer() + actions.setErrorPlayerState(false) + } else { + if (isPastEnd) { + actions.setEndReached(true) + } else { + // If not currently loading anything, + // not past the end of the recording, + // and part of the recording hasn't loaded, + // set error state + values.player?.replayer?.pause() + actions.endBuffer() + console.error("Error: Player tried to seek to a position that hasn't loaded yet") + actions.setErrorPlayerState(true) + } + } } // If not forced to play and if last playing state was pause, pause diff --git a/frontend/src/scenes/session-recordings/player/sidebar/PlayerSidebarOverviewGrid.tsx b/frontend/src/scenes/session-recordings/player/sidebar/PlayerSidebarOverviewGrid.tsx index 161e5acf59f7af..18cc517c3be541 100644 --- a/frontend/src/scenes/session-recordings/player/sidebar/PlayerSidebarOverviewGrid.tsx +++ b/frontend/src/scenes/session-recordings/player/sidebar/PlayerSidebarOverviewGrid.tsx @@ -13,8 +13,13 @@ export function PlayerSidebarOverviewGrid(): JSX.Element {
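The `sessionRecordingPlayerLogic` change above stops treating a seek past the recording's end as a load error. The decision tree, reduced to a plain function (state names taken from the diff, heavily simplified):

```ts
type BufferSeekOutcome = 'keep-buffering' | 'end-reached' | 'error'

// When the target segment is still a "buffer" segment, decide what the player should do.
function classifyBufferSeek(isStillLoading: boolean, isPastEnd: boolean): BufferSeekOutcome {
    if (isStillLoading) {
        return 'keep-buffering' // snapshots still loading or realtime polling: just wait
    }
    if (isPastEnd) {
        return 'end-reached' // past the end isn't an error; mark playback as finished
    }
    return 'error' // nothing loading, within bounds, but data missing: genuine error state
}
```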
{overviewItems.map((item) => ( - - {item.type === 'icon' ? ( + + {item.type === 'property' ? ( ) : ( item.value diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx index 06d87abfc35d7f..79d19b833f1c37 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingPreview.tsx @@ -199,8 +199,8 @@ export function SessionRecordingPreview({ )} onClick={() => onClick?.()} > -
-
+
+
{asDisplay(recording.person)}
@@ -218,8 +218,8 @@ export function SessionRecordingPreview({ )}
-
-
+
+
, + allowForTeam: (t) => !!t?.recording_domains?.length, + }, + { + id: 'replay-triggers', + title: 'Replay triggers', + component: , }, { id: 'replay-ingestion', diff --git a/frontend/src/scenes/settings/environment/ReplayTriggers.tsx b/frontend/src/scenes/settings/environment/ReplayTriggers.tsx new file mode 100644 index 00000000000000..48e79bea18cecf --- /dev/null +++ b/frontend/src/scenes/settings/environment/ReplayTriggers.tsx @@ -0,0 +1,262 @@ +import { IconPencil, IconPlus, IconTrash } from '@posthog/icons' +import clsx from 'clsx' +import { useActions, useValues } from 'kea' +import { Form } from 'kea-forms' +import { EventSelect } from 'lib/components/EventSelect/EventSelect' +import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { LemonButton } from 'lib/lemon-ui/LemonButton' +import { LemonDialog } from 'lib/lemon-ui/LemonDialog' +import { LemonField } from 'lib/lemon-ui/LemonField' +import { LemonInput } from 'lib/lemon-ui/LemonInput' +import { LemonLabel } from 'lib/lemon-ui/LemonLabel' +import { replayTriggersLogic } from 'scenes/settings/environment/replayTriggersLogic' +import { SupportedPlatforms } from 'scenes/settings/environment/SessionRecordingSettings' + +import { SessionReplayUrlTriggerConfig } from '~/types' + +function UrlConfigForm({ + type, + onCancel, + isSubmitting, +}: { + type: 'trigger' | 'blocklist' + onCancel: () => void + isSubmitting: boolean +}): JSX.Element { + return ( +
+
+ + We always wrap the URL regex with anchors, to avoid the unexpected behavior you would get without them. This is + because
https://example.com/
on its own matches more than just the homepage. You'd + need
^https://example.com/$
+ to match only the homepage.
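For reference, the anchoring behavior this banner describes is implemented by the `ensureAnchored` helper added in replayTriggersLogic.ts later in this diff; a minimal standalone sketch of that logic:

```ts
// Sketch of the anchoring rule described in the banner above, mirroring the
// `ensureAnchored` helper introduced in replayTriggersLogic.ts below.
function ensureAnchored(url: string): string {
    // Strip any anchors the user already typed, so they are not doubled up...
    url = url.startsWith('^') ? url.substring(1) : url
    url = url.endsWith('$') ? url.substring(0, url.length - 1) : url
    // ...then wrap the pattern so it must match the URL in full.
    return `^${url}$`
}

ensureAnchored('https://example.com/') // => '^https://example.com/$'
ensureAnchored('^https://example.com/$') // => '^https://example.com/$' (already anchored, unchanged)
```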
+ + Matching regex: + + + + +
+
+ + Cancel + + + Save + +
+
+ ) +} + +function UrlConfigRow({ + trigger, + index, + type, + editIndex, + onEdit, + onRemove, +}: { + trigger: SessionReplayUrlTriggerConfig + index: number + type: 'trigger' | 'blocklist' + editIndex: number | null + onEdit: (index: number) => void + onRemove: (index: number) => void +}): JSX.Element { + if (editIndex === index) { + return ( +
+ onEdit(-1)} isSubmitting={false} /> +
+ ) + } + + return ( +
+ + {trigger.matching === 'regex' ? 'Matches regex: ' : ''} {trigger.url} + +
+ } onClick={() => onEdit(index)} tooltip="Edit" center /> + } + tooltip={`Remove URL ${type}`} + center + onClick={() => { + LemonDialog.open({ + title: <>Remove URL {type}, + description: `Are you sure you want to remove this URL ${type}?`, + primaryButton: { + status: 'danger', + children: 'Remove', + onClick: () => onRemove(index), + }, + secondaryButton: { + children: 'Cancel', + }, + }) + }} + /> +
+
+ ) +} + +function UrlConfigSection({ + type, + title, + description, + ...props +}: { + type: 'trigger' | 'blocklist' + title: string + description: string + isAddFormVisible: boolean + config: SessionReplayUrlTriggerConfig[] | null + editIndex: number | null + isSubmitting: boolean + onAdd: () => void + onCancel: () => void + onEdit: (index: number) => void + onRemove: (index: number) => void +}): JSX.Element { + return ( +
+
+ {title} + } + data-attr={`session-replay-add-url-${type}`} + > + Add + +
+

{description}

+ + {props.isAddFormVisible && ( + + )} + {props.config?.map((trigger, index) => ( + + ))} +
+ ) +} + +function UrlTriggerOptions(): JSX.Element | null { + const { isAddUrlTriggerConfigFormVisible, urlTriggerConfig, editUrlTriggerIndex, isProposedUrlTriggerSubmitting } = + useValues(replayTriggersLogic) + const { newUrlTrigger, removeUrlTrigger, setEditUrlTriggerIndex, cancelProposingUrlTrigger } = + useActions(replayTriggersLogic) + + return ( + + ) +} + +function UrlBlocklistOptions(): JSX.Element | null { + const { + isAddUrlBlocklistConfigFormVisible, + urlBlocklistConfig, + editUrlBlocklistIndex, + isProposedUrlBlocklistSubmitting, + } = useValues(replayTriggersLogic) + const { newUrlBlocklist, removeUrlBlocklist, setEditUrlBlocklistIndex, cancelProposingUrlBlocklist } = + useActions(replayTriggersLogic) + + return ( + + ) +} + +function EventTriggerOptions(): JSX.Element | null { + const { eventTriggerConfig } = useValues(replayTriggersLogic) + const { updateEventTriggerConfig } = useActions(replayTriggersLogic) + + return ( +
+
+ Event emitted +
+

+ Session recording will be started immediately before PostHog queues any of these events to be sent to + the backend. +

+ + { + updateEventTriggerConfig(includedEvents) + }} + selectedEvents={eventTriggerConfig ?? []} + addElement={ + } sideIcon={null}> + Add event + + } + /> +
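As a hedged sketch (not part of this PR): the persistence path a caller takes through the component above, using only names added elsewhere in this diff. The `$exception` event name is an arbitrary example.

```ts
// Hypothetical usage sketch, assuming the kea wiring from replayTriggersLogic.ts
// added later in this diff. '$exception' is an illustrative event name only.
import { replayTriggersLogic } from 'scenes/settings/environment/replayTriggersLogic'

const logic = replayTriggersLogic()
logic.mount()
// Stores ['$exception'] in the eventTriggerConfig reducer; the logic's listener
// then persists it on the team via teamLogic.asyncActions.updateCurrentTeam({
//     session_recording_event_trigger_config: ['$exception'],
// })
logic.actions.updateEventTriggerConfig(['$exception'])
```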
+ ) +} + +export function ReplayTriggers(): JSX.Element { + return ( +
+ +

+ Use the settings below to control when recordings are started or paused. If no triggers are selected, + recordings will always start whenever recording is enabled. +

+ + + +
+ ) +} diff --git a/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx b/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx index f645c195e5440a..69258301575c24 100644 --- a/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx +++ b/frontend/src/scenes/settings/environment/SessionRecordingIngestionSettings.tsx @@ -1,32 +1,23 @@ -import { IconPencil, IconPlus, IconTrash } from '@posthog/icons' import { LemonButton, - LemonDialog, - LemonInput, LemonSegmentedButton, LemonSegmentedButtonOption, LemonSelect, Link, Spinner, } from '@posthog/lemon-ui' -import clsx from 'clsx' import { useActions, useValues } from 'kea' -import { Form } from 'kea-forms' -import { EventSelect } from 'lib/components/EventSelect/EventSelect' -import { FlaggedFeature } from 'lib/components/FlaggedFeature' import { FlagSelector } from 'lib/components/FlagSelector' import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' -import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' -import { FEATURE_FLAGS, SESSION_REPLAY_MINIMUM_DURATION_OPTIONS } from 'lib/constants' +import { SESSION_REPLAY_MINIMUM_DURATION_OPTIONS } from 'lib/constants' import { IconCancel } from 'lib/lemon-ui/icons' -import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonLabel } from 'lib/lemon-ui/LemonLabel/LemonLabel' import { SupportedPlatforms } from 'scenes/settings/environment/SessionRecordingSettings' import { sessionReplayIngestionControlLogic } from 'scenes/settings/environment/sessionReplayIngestionControlLogic' import { teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' -import { AvailableFeature, MultivariateFlagOptions, SessionReplayUrlTriggerConfig } from '~/types' +import { AvailableFeature, MultivariateFlagOptions } from '~/types' function variantOptions(multivariate: MultivariateFlagOptions | undefined): LemonSegmentedButtonOption[] { if (!multivariate) { @@ -124,237 +115,6 @@ function LinkedFlagSelector(): JSX.Element | null { ) } -function UrlConfigForm({ - type, - onCancel, - isSubmitting, -}: { - type: 'trigger' | 'blocklist' - onCancel: () => void - isSubmitting: boolean -}): JSX.Element { - return ( -
-
- - - - - - -
-
- - Cancel - - - Save - -
-
- ) -} - -// New shared row component -function UrlConfigRow({ - trigger, - index, - type, - editIndex, - onEdit, - onRemove, -}: { - trigger: SessionReplayUrlTriggerConfig - index: number - type: 'trigger' | 'blocklist' - editIndex: number | null - onEdit: (index: number) => void - onRemove: (index: number) => void -}): JSX.Element { - if (editIndex === index) { - return ( -
- onEdit(-1)} isSubmitting={false} /> -
- ) - } - - return ( -
- - {trigger.matching === 'regex' ? 'Matches regex: ' : ''} {trigger.url} - -
- } onClick={() => onEdit(index)} tooltip="Edit" center /> - } - tooltip={`Remove URL ${type}`} - center - onClick={() => { - LemonDialog.open({ - title: <>Remove URL {type}, - description: `Are you sure you want to remove this URL ${type}?`, - primaryButton: { - status: 'danger', - children: 'Remove', - onClick: () => onRemove(index), - }, - secondaryButton: { - children: 'Cancel', - }, - }) - }} - /> -
-
- ) -} - -function UrlConfigSection({ - type, - title, - description, - ...props -}: { - type: 'trigger' | 'blocklist' - title: string - description: string - isAddFormVisible: boolean - config: SessionReplayUrlTriggerConfig[] | null - editIndex: number | null - isSubmitting: boolean - onAdd: () => void - onCancel: () => void - onEdit: (index: number) => void - onRemove: (index: number) => void -}): JSX.Element { - return ( -
-
- {title} - } - data-attr={`session-replay-add-url-${type}`} - > - Add - -
-

{description}

- -

{title} is only available for JavaScript Web.

- - {props.isAddFormVisible && ( - - )} - {props.config?.map((trigger, index) => ( - - ))} -
- ) -} - -function UrlTriggerOptions(): JSX.Element | null { - const { isAddUrlTriggerConfigFormVisible, urlTriggerConfig, editUrlTriggerIndex, isProposedUrlTriggerSubmitting } = - useValues(sessionReplayIngestionControlLogic) - const { newUrlTrigger, removeUrlTrigger, setEditUrlTriggerIndex, cancelProposingUrlTrigger } = useActions( - sessionReplayIngestionControlLogic - ) - - return ( - - ) -} - -function UrlBlocklistOptions(): JSX.Element | null { - const { - isAddUrlBlocklistConfigFormVisible, - urlBlocklistConfig, - editUrlBlocklistIndex, - isProposedUrlBlocklistSubmitting, - } = useValues(sessionReplayIngestionControlLogic) - const { newUrlBlocklist, removeUrlBlocklist, setEditUrlBlocklistIndex, cancelProposingUrlBlocklist } = useActions( - sessionReplayIngestionControlLogic - ) - - return ( - - ) -} - -function EventTriggerOptions(): JSX.Element | null { - const { eventTriggerConfig } = useValues(sessionReplayIngestionControlLogic) - const { updateEventTriggerConfig } = useActions(sessionReplayIngestionControlLogic) - - return ( -
-
- Event emitted -
-

- Session recording will be started immediately before PostHog queues any of these events to be sent to - the backend. -

-

Event emitted is only available for JavaScript Web.

- { - updateEventTriggerConfig(includedEvents) - }} - selectedEvents={eventTriggerConfig ?? []} - addElement={ - } sideIcon={null}> - Add event - - } - /> -
- ) -} - export function SessionRecordingIngestionSettings(): JSX.Element | null { const { updateCurrentTeam } = useActions(teamLogic) const { currentTeam } = useValues(teamLogic) @@ -513,11 +273,6 @@ export function SessionRecordingIngestionSettings(): JSX.Element | null { )} - - - - - ) diff --git a/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx b/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx index 4b53a895a99961..ff6650d2bffd52 100644 --- a/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx +++ b/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx @@ -238,10 +238,10 @@ export function NetworkCaptureSettings(): JSX.Element { Learn how to mask header and payload values in our docs

- +
diff --git a/frontend/src/scenes/settings/environment/replayTriggersLogic.ts b/frontend/src/scenes/settings/environment/replayTriggersLogic.ts new file mode 100644 index 00000000000000..835a38728b7318 --- /dev/null +++ b/frontend/src/scenes/settings/environment/replayTriggersLogic.ts @@ -0,0 +1,233 @@ +import { actions, connect, kea, listeners, path, reducers, selectors, sharedListeners } from 'kea' +import { forms } from 'kea-forms' +import { subscriptions } from 'kea-subscriptions' +import { teamLogic } from 'scenes/teamLogic' + +import { SessionReplayUrlTriggerConfig, TeamPublicType, TeamType } from '~/types' + +import type { replayTriggersLogicType } from './replayTriggersLogicType' + +const NEW_URL_TRIGGER = { url: '', matching: 'regex' } + +function ensureAnchored(url: string): string { + url = url.startsWith('^') ? url.substring(1) : url + url = url.endsWith('$') ? url.substring(0, url.length - 1) : url + return `^${url}$` +} + +export const replayTriggersLogic = kea([ + path(['scenes', 'settings', 'project', 'replayTriggersLogic']), + actions({ + setUrlTriggerConfig: (urlTriggerConfig: SessionReplayUrlTriggerConfig[]) => ({ urlTriggerConfig }), + addUrlTrigger: (urlTriggerConfig: SessionReplayUrlTriggerConfig) => ({ urlTriggerConfig }), + removeUrlTrigger: (index: number) => ({ index }), + updateUrlTrigger: (index: number, urlTriggerConfig: SessionReplayUrlTriggerConfig) => ({ + index, + urlTriggerConfig, + }), + setEditUrlTriggerIndex: (originalIndex: number | null) => ({ originalIndex }), + newUrlTrigger: true, + cancelProposingUrlTrigger: true, + + setUrlBlocklistConfig: (urlBlocklistConfig: SessionReplayUrlTriggerConfig[]) => ({ urlBlocklistConfig }), + addUrlBlocklist: (urlBlocklistConfig: SessionReplayUrlTriggerConfig) => ({ urlBlocklistConfig }), + removeUrlBlocklist: (index: number) => ({ index }), + updateUrlBlocklist: (index: number, urlBlocklistConfig: SessionReplayUrlTriggerConfig) => ({ + index, + urlBlocklistConfig, + }), + setEditUrlBlocklistIndex: (originalIndex: number | null) => ({ originalIndex }), + newUrlBlocklist: true, + cancelProposingUrlBlocklist: true, + setEventTriggerConfig: (eventTriggerConfig: string[]) => ({ eventTriggerConfig }), + updateEventTriggerConfig: (eventTriggerConfig: string[]) => ({ eventTriggerConfig }), + }), + connect({ values: [teamLogic, ['currentTeam']], actions: [teamLogic, ['updateCurrentTeam']] }), + reducers({ + urlTriggerConfig: [ + null as SessionReplayUrlTriggerConfig[] | null, + { + setUrlTriggerConfig: (_, { urlTriggerConfig }) => urlTriggerConfig, + addUrlTrigger: (state, { urlTriggerConfig }) => [...(state ?? []), urlTriggerConfig], + updateUrlTrigger: (state, { index, urlTriggerConfig: newUrlTriggerConfig }) => + (state ?? []).map((triggerConfig, i) => (i === index ? newUrlTriggerConfig : triggerConfig)), + removeUrlTrigger: (state, { index }) => { + return (state ?? []).filter((_, i) => i !== index) + }, + }, + ], + editUrlTriggerIndex: [ + null as number | null, + { + setEditUrlTriggerIndex: (_, { originalIndex }) => originalIndex, + removeUrlTrigger: (editUrlTriggerIndex, { index }) => + editUrlTriggerIndex && index < editUrlTriggerIndex + ? editUrlTriggerIndex - 1 + : index === editUrlTriggerIndex + ? 
null + : editUrlTriggerIndex, + newUrlTrigger: () => -1, + updateUrlTrigger: () => null, + addUrlTrigger: () => null, + cancelProposingUrlTrigger: () => null, + }, + ], + urlBlocklistConfig: [ + null as SessionReplayUrlTriggerConfig[] | null, + { + setUrlBlocklistConfig: (_, { urlBlocklistConfig }) => urlBlocklistConfig, + addUrlBlocklist: (state, { urlBlocklistConfig }) => [...(state ?? []), urlBlocklistConfig], + updateUrlBlocklist: (state, { index, urlBlocklistConfig: newUrlBlocklistConfig }) => + (state ?? []).map((blocklistConfig, i) => (i === index ? newUrlBlocklistConfig : blocklistConfig)), + removeUrlBlocklist: (state, { index }) => { + return (state ?? []).filter((_, i) => i !== index) + }, + }, + ], + editUrlBlocklistIndex: [ + null as number | null, + { + setEditUrlBlocklistIndex: (_, { originalIndex }) => originalIndex, + removeUrlBlocklist: (editUrlBlocklistIndex, { index }) => + editUrlBlocklistIndex && index < editUrlBlocklistIndex + ? editUrlBlocklistIndex - 1 + : index === editUrlBlocklistIndex + ? null + : editUrlBlocklistIndex, + newUrlBlocklist: () => -1, + updateUrlBlocklist: () => null, + addUrlBlocklist: () => null, + }, + ], + eventTriggerConfig: [ + null as string[] | null, + { + setEventTriggerConfig: (_, { eventTriggerConfig }) => eventTriggerConfig, + updateEventTriggerConfig: (_, { eventTriggerConfig }) => eventTriggerConfig, + }, + ], + }), + selectors({ + remoteUrlTriggerConfig: [ + (s) => [s.currentTeam], + (currentTeam) => currentTeam?.session_recording_url_trigger_config, + ], + isAddUrlTriggerConfigFormVisible: [ + (s) => [s.editUrlTriggerIndex], + (editUrlTriggerIndex) => editUrlTriggerIndex === -1, + ], + urlTriggerToEdit: [ + (s) => [s.urlTriggerConfig, s.editUrlTriggerIndex], + (urlTriggerConfig, editUrlTriggerIndex) => { + if ( + editUrlTriggerIndex === null || + editUrlTriggerIndex === -1 || + !urlTriggerConfig?.[editUrlTriggerIndex] + ) { + return NEW_URL_TRIGGER + } + return urlTriggerConfig[editUrlTriggerIndex] + }, + ], + + remoteUrlBlocklistConfig: [ + (s) => [s.currentTeam], + (currentTeam) => currentTeam?.session_recording_url_blocklist_config, + ], + isAddUrlBlocklistConfigFormVisible: [ + (s) => [s.editUrlBlocklistIndex], + (editUrlBlocklistIndex) => editUrlBlocklistIndex === -1, + ], + urlBlocklistToEdit: [ + (s) => [s.urlBlocklistConfig, s.editUrlBlocklistIndex], + (urlBlocklistConfig, editUrlBlocklistIndex) => { + if ( + editUrlBlocklistIndex === null || + editUrlBlocklistIndex === -1 || + !urlBlocklistConfig?.[editUrlBlocklistIndex] + ) { + return NEW_URL_TRIGGER + } + return urlBlocklistConfig[editUrlBlocklistIndex] + }, + ], + }), + subscriptions(({ actions }) => ({ + currentTeam: (currentTeam: TeamPublicType | TeamType | null) => { + actions.setUrlTriggerConfig(currentTeam?.session_recording_url_trigger_config ?? []) + actions.setUrlBlocklistConfig(currentTeam?.session_recording_url_blocklist_config ?? []) + actions.setEventTriggerConfig(currentTeam?.session_recording_event_trigger_config ?? []) + }, + })), + forms(({ values, actions }) => ({ + proposedUrlTrigger: { + defaults: { url: '', matching: 'regex' } as SessionReplayUrlTriggerConfig, + errors: ({ url }) => ({ + url: !url ? 
'Must have a URL' : undefined, + }), + submit: async ({ url, matching }) => { + if (values.editUrlTriggerIndex !== null && values.editUrlTriggerIndex >= 0) { + actions.updateUrlTrigger(values.editUrlTriggerIndex, { url: ensureAnchored(url), matching }) + } else { + actions.addUrlTrigger({ url: ensureAnchored(url), matching }) + } + }, + }, + proposedUrlBlocklist: { + defaults: { url: '', matching: 'regex' } as SessionReplayUrlTriggerConfig, + errors: ({ url }) => ({ + url: !url ? 'Must have a URL' : undefined, + }), + submit: async ({ url, matching }) => { + if (values.editUrlBlocklistIndex !== null && values.editUrlBlocklistIndex >= 0) { + actions.updateUrlBlocklist(values.editUrlBlocklistIndex, { url: ensureAnchored(url), matching }) + } else { + actions.addUrlBlocklist({ url: ensureAnchored(url), matching }) + } + }, + }, + })), + sharedListeners(({ values }) => ({ + saveUrlTriggers: async () => { + await teamLogic.asyncActions.updateCurrentTeam({ + session_recording_url_trigger_config: values.urlTriggerConfig ?? [], + }) + }, + saveUrlBlocklists: async () => { + await teamLogic.asyncActions.updateCurrentTeam({ + session_recording_url_blocklist_config: values.urlBlocklistConfig ?? [], + }) + }, + })), + listeners(({ sharedListeners, actions, values }) => ({ + setEditUrlTriggerIndex: () => { + actions.setProposedUrlTriggerValue('url', values.urlTriggerToEdit.url) + actions.setProposedUrlTriggerValue('matching', values.urlTriggerToEdit.matching) + }, + addUrlTrigger: sharedListeners.saveUrlTriggers, + removeUrlTrigger: sharedListeners.saveUrlTriggers, + updateUrlTrigger: sharedListeners.saveUrlTriggers, + submitProposedUrlTriggerSuccess: () => { + actions.setEditUrlTriggerIndex(null) + actions.resetProposedUrlTrigger() + }, + + setEditUrlBlocklistIndex: () => { + actions.setProposedUrlBlocklistValue('url', values.urlBlocklistToEdit.url) + actions.setProposedUrlBlocklistValue('matching', values.urlBlocklistToEdit.matching) + }, + addUrlBlocklist: sharedListeners.saveUrlBlocklists, + removeUrlBlocklist: sharedListeners.saveUrlBlocklists, + updateUrlBlocklist: sharedListeners.saveUrlBlocklists, + submitProposedUrlBlocklistSuccess: () => { + actions.setEditUrlBlocklistIndex(null) + actions.resetProposedUrlBlocklist() + }, + updateEventTriggerConfig: async ({ eventTriggerConfig }) => { + actions.setEventTriggerConfig(eventTriggerConfig) + await teamLogic.asyncActions.updateCurrentTeam({ + session_recording_event_trigger_config: eventTriggerConfig, + }) + }, + })), +]) diff --git a/frontend/src/scenes/settings/environment/sessionReplayIngestionControlLogic.ts b/frontend/src/scenes/settings/environment/sessionReplayIngestionControlLogic.ts index 9432c128ac6813..a5a66ad5891ec7 100644 --- a/frontend/src/scenes/settings/environment/sessionReplayIngestionControlLogic.ts +++ b/frontend/src/scenes/settings/environment/sessionReplayIngestionControlLogic.ts @@ -1,46 +1,18 @@ -import { actions, afterMount, connect, kea, listeners, path, props, reducers, selectors, sharedListeners } from 'kea' -import { forms } from 'kea-forms' +import { actions, afterMount, connect, kea, path, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' -import { subscriptions } from 'kea-subscriptions' import api from 'lib/api' import { isObject } from 'lib/utils' import { variantKeyToIndexFeatureFlagPayloads } from 'scenes/feature-flags/featureFlagLogic' import { teamLogic } from 'scenes/teamLogic' -import { FeatureFlagBasicType, SessionReplayUrlTriggerConfig, TeamPublicType, TeamType } from '~/types' +import 
{ FeatureFlagBasicType } from '~/types' import type { sessionReplayIngestionControlLogicType } from './sessionReplayIngestionControlLogicType' -const NEW_URL_TRIGGER = { url: '', matching: 'regex' } - export const sessionReplayIngestionControlLogic = kea([ path(['scenes', 'settings', 'project', 'sessionReplayIngestionControlLogic']), actions({ selectFeatureFlag: (flag: FeatureFlagBasicType) => ({ flag }), - - setUrlTriggerConfig: (urlTriggerConfig: SessionReplayUrlTriggerConfig[]) => ({ urlTriggerConfig }), - addUrlTrigger: (urlTriggerConfig: SessionReplayUrlTriggerConfig) => ({ urlTriggerConfig }), - removeUrlTrigger: (index: number) => ({ index }), - updateUrlTrigger: (index: number, urlTriggerConfig: SessionReplayUrlTriggerConfig) => ({ - index, - urlTriggerConfig, - }), - setEditUrlTriggerIndex: (originalIndex: number | null) => ({ originalIndex }), - newUrlTrigger: true, - cancelProposingUrlTrigger: true, - - setUrlBlocklistConfig: (urlBlocklistConfig: SessionReplayUrlTriggerConfig[]) => ({ urlBlocklistConfig }), - addUrlBlocklist: (urlBlocklistConfig: SessionReplayUrlTriggerConfig) => ({ urlBlocklistConfig }), - removeUrlBlocklist: (index: number) => ({ index }), - updateUrlBlocklist: (index: number, urlBlocklistConfig: SessionReplayUrlTriggerConfig) => ({ - index, - urlBlocklistConfig, - }), - setEditUrlBlocklistIndex: (originalIndex: number | null) => ({ originalIndex }), - newUrlBlocklist: true, - cancelProposingUrlBlocklist: true, - setEventTriggerConfig: (eventTriggerConfig: string[]) => ({ eventTriggerConfig }), - updateEventTriggerConfig: (eventTriggerConfig: string[]) => ({ eventTriggerConfig }), }), connect({ values: [teamLogic, ['currentTeam']], actions: [teamLogic, ['updateCurrentTeam']] }), reducers({ @@ -50,70 +22,7 @@ export const sessionReplayIngestionControlLogic = kea flag, }, ], - urlTriggerConfig: [ - null as SessionReplayUrlTriggerConfig[] | null, - { - setUrlTriggerConfig: (_, { urlTriggerConfig }) => urlTriggerConfig, - addUrlTrigger: (state, { urlTriggerConfig }) => [...(state ?? []), urlTriggerConfig], - updateUrlTrigger: (state, { index, urlTriggerConfig: newUrlTriggerConfig }) => - (state ?? []).map((triggerConfig, i) => (i === index ? newUrlTriggerConfig : triggerConfig)), - removeUrlTrigger: (state, { index }) => { - return (state ?? []).filter((_, i) => i !== index) - }, - }, - ], - editUrlTriggerIndex: [ - null as number | null, - { - setEditUrlTriggerIndex: (_, { originalIndex }) => originalIndex, - removeUrlTrigger: (editUrlTriggerIndex, { index }) => - editUrlTriggerIndex && index < editUrlTriggerIndex - ? editUrlTriggerIndex - 1 - : index === editUrlTriggerIndex - ? null - : editUrlTriggerIndex, - newUrlTrigger: () => -1, - updateUrlTrigger: () => null, - addUrlTrigger: () => null, - cancelProposingUrlTrigger: () => null, - }, - ], - urlBlocklistConfig: [ - null as SessionReplayUrlTriggerConfig[] | null, - { - setUrlBlocklistConfig: (_, { urlBlocklistConfig }) => urlBlocklistConfig, - addUrlBlocklist: (state, { urlBlocklistConfig }) => [...(state ?? []), urlBlocklistConfig], - updateUrlBlocklist: (state, { index, urlBlocklistConfig: newUrlBlocklistConfig }) => - (state ?? []).map((blocklistConfig, i) => (i === index ? newUrlBlocklistConfig : blocklistConfig)), - removeUrlBlocklist: (state, { index }) => { - return (state ?? 
[]).filter((_, i) => i !== index) - }, - }, - ], - editUrlBlocklistIndex: [ - null as number | null, - { - setEditUrlBlocklistIndex: (_, { originalIndex }) => originalIndex, - removeUrlBlocklist: (editUrlBlocklistIndex, { index }) => - editUrlBlocklistIndex && index < editUrlBlocklistIndex - ? editUrlBlocklistIndex - 1 - : index === editUrlBlocklistIndex - ? null - : editUrlBlocklistIndex, - newUrlBlocklist: () => -1, - updateUrlBlocklist: () => null, - addUrlBlocklist: () => null, - }, - ], - eventTriggerConfig: [ - null as string[] | null, - { - setEventTriggerConfig: (_, { eventTriggerConfig }) => eventTriggerConfig, - updateEventTriggerConfig: (_, { eventTriggerConfig }) => eventTriggerConfig, - }, - ], }), - props({}), loaders(({ values }) => ({ featureFlag: { loadFeatureFlag: async () => { @@ -140,124 +49,8 @@ export const sessionReplayIngestionControlLogic = kea [s.linkedFlag], (linkedFlag) => isObject(linkedFlag?.filters.multivariate)], - - remoteUrlTriggerConfig: [ - (s) => [s.currentTeam], - (currentTeam) => currentTeam?.session_recording_url_trigger_config, - ], - isAddUrlTriggerConfigFormVisible: [ - (s) => [s.editUrlTriggerIndex], - (editUrlTriggerIndex) => editUrlTriggerIndex === -1, - ], - urlTriggerToEdit: [ - (s) => [s.urlTriggerConfig, s.editUrlTriggerIndex], - (urlTriggerConfig, editUrlTriggerIndex) => { - if ( - editUrlTriggerIndex === null || - editUrlTriggerIndex === -1 || - !urlTriggerConfig?.[editUrlTriggerIndex] - ) { - return NEW_URL_TRIGGER - } - return urlTriggerConfig[editUrlTriggerIndex] - }, - ], - - remoteUrlBlocklistConfig: [ - (s) => [s.currentTeam], - (currentTeam) => currentTeam?.session_recording_url_blocklist_config, - ], - isAddUrlBlocklistConfigFormVisible: [ - (s) => [s.editUrlBlocklistIndex], - (editUrlBlocklistIndex) => editUrlBlocklistIndex === -1, - ], - urlBlocklistToEdit: [ - (s) => [s.urlBlocklistConfig, s.editUrlBlocklistIndex], - (urlBlocklistConfig, editUrlBlocklistIndex) => { - if ( - editUrlBlocklistIndex === null || - editUrlBlocklistIndex === -1 || - !urlBlocklistConfig?.[editUrlBlocklistIndex] - ) { - return NEW_URL_TRIGGER - } - return urlBlocklistConfig[editUrlBlocklistIndex] - }, - ], }), afterMount(({ actions }) => { actions.loadFeatureFlag() }), - subscriptions(({ actions }) => ({ - currentTeam: (currentTeam: TeamPublicType | TeamType | null) => { - actions.setUrlTriggerConfig(currentTeam?.session_recording_url_trigger_config ?? []) - actions.setUrlBlocklistConfig(currentTeam?.session_recording_url_blocklist_config ?? []) - actions.setEventTriggerConfig(currentTeam?.session_recording_event_trigger_config ?? 
[]) - }, - })), - forms(({ values, actions }) => ({ - proposedUrlTrigger: { - defaults: { url: '', matching: 'regex' } as SessionReplayUrlTriggerConfig, - submit: async ({ url, matching }) => { - if (values.editUrlTriggerIndex !== null && values.editUrlTriggerIndex >= 0) { - actions.updateUrlTrigger(values.editUrlTriggerIndex, { url, matching }) - } else { - actions.addUrlTrigger({ url, matching }) - } - }, - }, - proposedUrlBlocklist: { - defaults: { url: '', matching: 'regex' } as SessionReplayUrlTriggerConfig, - submit: async ({ url, matching }) => { - if (values.editUrlBlocklistIndex !== null && values.editUrlBlocklistIndex >= 0) { - actions.updateUrlBlocklist(values.editUrlBlocklistIndex, { url, matching }) - } else { - actions.addUrlBlocklist({ url, matching }) - } - }, - }, - })), - sharedListeners(({ values }) => ({ - saveUrlTriggers: async () => { - await teamLogic.asyncActions.updateCurrentTeam({ - session_recording_url_trigger_config: values.urlTriggerConfig ?? [], - }) - }, - saveUrlBlocklists: async () => { - await teamLogic.asyncActions.updateCurrentTeam({ - session_recording_url_blocklist_config: values.urlBlocklistConfig ?? [], - }) - }, - })), - listeners(({ sharedListeners, actions, values }) => ({ - setEditUrlTriggerIndex: () => { - actions.setProposedUrlTriggerValue('url', values.urlTriggerToEdit.url) - actions.setProposedUrlTriggerValue('matching', values.urlTriggerToEdit.matching) - }, - addUrlTrigger: sharedListeners.saveUrlTriggers, - removeUrlTrigger: sharedListeners.saveUrlTriggers, - updateUrlTrigger: sharedListeners.saveUrlTriggers, - submitProposedUrlTriggerSuccess: () => { - actions.setEditUrlTriggerIndex(null) - actions.resetProposedUrlTrigger() - }, - - setEditUrlBlocklistIndex: () => { - actions.setProposedUrlBlocklistValue('url', values.urlBlocklistToEdit.url) - actions.setProposedUrlBlocklistValue('matching', values.urlBlocklistToEdit.matching) - }, - addUrlBlocklist: sharedListeners.saveUrlBlocklists, - removeUrlBlocklist: sharedListeners.saveUrlBlocklists, - updateUrlBlocklist: sharedListeners.saveUrlBlocklists, - submitProposedUrlBlocklistSuccess: () => { - actions.setEditUrlBlocklistIndex(null) - actions.resetProposedUrlBlocklist() - }, - updateEventTriggerConfig: async ({ eventTriggerConfig }) => { - actions.setEventTriggerConfig(eventTriggerConfig) - await teamLogic.asyncActions.updateCurrentTeam({ - session_recording_event_trigger_config: eventTriggerConfig, - }) - }, - })), ]) diff --git a/frontend/src/scenes/settings/settingsLogic.ts b/frontend/src/scenes/settings/settingsLogic.ts index 37d0a4bc7ce312..c850cba129c3ff 100644 --- a/frontend/src/scenes/settings/settingsLogic.ts +++ b/frontend/src/scenes/settings/settingsLogic.ts @@ -3,6 +3,7 @@ import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { copyToClipboard } from 'lib/utils/copyToClipboard' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' +import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' @@ -17,7 +18,16 @@ export const settingsLogic = kea([ key((props) => props.logicKey ?? 
'global'), path((key) => ['scenes', 'settings', 'settingsLogic', key]), connect({ - values: [featureFlagLogic, ['featureFlags'], userLogic, ['hasAvailableFeature'], preflightLogic, ['preflight']], + values: [ + featureFlagLogic, + ['featureFlags'], + userLogic, + ['hasAvailableFeature'], + preflightLogic, + ['preflight'], + teamLogic, + ['currentTeam'], + ], }), actions({ @@ -135,8 +145,24 @@ export const settingsLogic = kea([ }, ], settings: [ - (s) => [s.selectedLevel, s.selectedSectionId, s.sections, s.settingId, s.doesMatchFlags, s.preflight], - (selectedLevel, selectedSectionId, sections, settingId, doesMatchFlags, preflight): Setting[] => { + (s) => [ + s.selectedLevel, + s.selectedSectionId, + s.sections, + s.settingId, + s.doesMatchFlags, + s.preflight, + s.currentTeam, + ], + ( + selectedLevel, + selectedSectionId, + sections, + settingId, + doesMatchFlags, + preflight, + currentTeam + ): Setting[] => { let settings: Setting[] = [] if (selectedSectionId) { @@ -158,6 +184,9 @@ export const settingsLogic = kea([ if (x.hideOn?.includes(Realm.Cloud) && preflight?.cloud) { return false } + if (x.allowForTeam) { + return x.allowForTeam(currentTeam) + } return true }) }, diff --git a/frontend/src/scenes/settings/types.ts b/frontend/src/scenes/settings/types.ts index fa210a263e20c1..01032980772322 100644 --- a/frontend/src/scenes/settings/types.ts +++ b/frontend/src/scenes/settings/types.ts @@ -1,6 +1,6 @@ import { EitherMembershipLevel, FEATURE_FLAGS } from 'lib/constants' -import { Realm } from '~/types' +import { Realm, TeamPublicType, TeamType } from '~/types' export type SettingsLogicProps = { logicKey?: string @@ -46,6 +46,7 @@ export type SettingSectionId = | 'user-customization' export type SettingId = + | 'replay-triggers' | 'display-name' | 'snippet' | 'bookmarklet' @@ -115,6 +116,11 @@ export type Setting = { */ flag?: FeatureFlagKey | `!${FeatureFlagKey}` | (FeatureFlagKey | `!${FeatureFlagKey}`)[] hideOn?: Realm[] + /** + * defaults to true if not provided + * can check if a team should have access to a setting and return false if not + */ + allowForTeam?: (team: TeamType | TeamPublicType | null) => boolean } export interface SettingSection extends Pick { diff --git a/frontend/src/scenes/surveys/SurveyEdit.tsx b/frontend/src/scenes/surveys/SurveyEdit.tsx index 28a2b8d9205218..b6bf810e735778 100644 --- a/frontend/src/scenes/surveys/SurveyEdit.tsx +++ b/frontend/src/scenes/surveys/SurveyEdit.tsx @@ -26,7 +26,7 @@ import { FEATURE_FLAGS } from 'lib/constants' import { dayjs } from 'lib/dayjs' import { IconCancel } from 'lib/lemon-ui/icons' import { LemonField } from 'lib/lemon-ui/LemonField' -import { LemonRadio } from 'lib/lemon-ui/LemonRadio' +import { LemonRadio, LemonRadioOption } from 'lib/lemon-ui/LemonRadio' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { featureFlagLogic as enabledFeaturesLogic } from 'lib/logic/featureFlagLogic' import { formatDate } from 'lib/utils' @@ -100,6 +100,27 @@ export default function SurveyEdit(): JSX.Element { ? 
undefined : 'Upgrade your plan to use an adaptive limit on survey responses' + const surveyLimitOptions: LemonRadioOption<'until_stopped' | 'until_limit' | 'until_adaptive_limit'>[] = [ + { + value: 'until_stopped', + label: 'Keep collecting responses until the survey is stopped', + 'data-attr': 'survey-collection-until-stopped', + }, + { + value: 'until_limit', + label: 'Stop displaying the survey after reaching a certain number of completed surveys', + 'data-attr': 'survey-collection-until-limit', + }, + ] + + if (featureFlags[FEATURE_FLAGS.SURVEYS_ADAPTIVE_LIMITS]) { + surveyLimitOptions.push({ + value: 'until_adaptive_limit', + label: 'Collect a certain number of surveys per day, week or month', + 'data-attr': 'survey-collection-until-adaptive-limit', + disabledReason: surveysAdaptiveLimitsDisabledReason, + } as unknown as LemonRadioOption<'until_stopped' | 'until_limit' | 'until_adaptive_limit'>) + } useMemo(() => { if (surveyUsesLimit) { setDataCollectionType('until_limit') @@ -912,24 +933,7 @@ export default function SurveyEdit(): JSX.Element { } setDataCollectionType(newValue) }} - options={[ - { - value: 'until_stopped', - label: 'Keep collecting responses until the survey is stopped', - 'data-attr': 'survey-collection-until-stopped', - }, - { - value: 'until_limit', - label: 'Stop displaying the survey after reaching a certain number of completed surveys', - 'data-attr': 'survey-collection-until-limit', - }, - { - value: 'until_adaptive_limit', - label: 'Collect a certain number of surveys per day, week or month', - 'data-attr': 'survey-collection-until-adaptive-limit', - disabledReason: surveysAdaptiveLimitsDisabledReason, - }, - ]} + options={surveyLimitOptions} />
diff --git a/frontend/src/scenes/surveys/Surveys.tsx b/frontend/src/scenes/surveys/Surveys.tsx index 35cbd13688bf9d..b129ff84d514d2 100644 --- a/frontend/src/scenes/surveys/Surveys.tsx +++ b/frontend/src/scenes/surveys/Surveys.tsx @@ -18,6 +18,7 @@ import { MemberSelect } from 'lib/components/MemberSelect' import { PageHeader } from 'lib/components/PageHeader' import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' import { VersionCheckerBanner } from 'lib/components/VersionChecker/VersionCheckerBanner' +import { FEATURE_FLAGS } from 'lib/constants' import { dayjs } from 'lib/dayjs' import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' @@ -27,6 +28,7 @@ import { LemonTableColumn } from 'lib/lemon-ui/LemonTable' import { createdAtColumn, createdByColumn } from 'lib/lemon-ui/LemonTable/columnUtils' import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' import { LemonTabs } from 'lib/lemon-ui/LemonTabs' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import stringWithWBR from 'lib/utils/stringWithWBR' import { useState } from 'react' import { LinkedHogFunctions } from 'scenes/pipeline/hogfunctions/list/LinkedHogFunctions' @@ -67,6 +69,8 @@ export function Surveys(): JSX.Element { const { user } = useValues(userLogic) const { updateCurrentTeam } = useActions(teamLogic) const { currentTeam } = useValues(teamLogic) + const { featureFlags } = useValues(featureFlagLogic) + const [editableSurveyConfig, setEditableSurveyConfig] = useState( currentTeam?.survey_config?.appearance || defaultSurveyAppearance ) @@ -78,6 +82,7 @@ export function Surveys(): JSX.Element { } const shouldShowEmptyState = !surveysLoading && surveys.length === 0 const showLinkedHogFunctions = useFeatureFlag('HOG_FUNCTIONS_LINKED') + const settingLevel = featureFlags[FEATURE_FLAGS.ENVIRONMENTS] ? 'environment' : 'project' return (
@@ -224,8 +229,8 @@ export function Surveys(): JSX.Element { }} className="mb-2" > - Survey popovers are currently disabled for this project but there are active surveys - running. Re-enable them in the settings. + Survey popovers are currently disabled for this {settingLevel} but there are active + surveys running. Re-enable them in the settings. ) : null}
diff --git a/frontend/src/scenes/surveys/utils.ts b/frontend/src/scenes/surveys/utils.ts index 7ab39dc59c6e54..4493e1ac943e6b 100644 --- a/frontend/src/scenes/surveys/utils.ts +++ b/frontend/src/scenes/surveys/utils.ts @@ -1,7 +1,7 @@ -import { sanitize } from 'dompurify' +import DOMPurify from 'dompurify' const sanitizeConfig = { ADD_ATTR: ['target'] } export function sanitizeHTML(html: string): string { - return sanitize(html, sanitizeConfig) + return DOMPurify.sanitize(html, sanitizeConfig) } diff --git a/frontend/src/scenes/trends/mathsLogic.tsx b/frontend/src/scenes/trends/mathsLogic.tsx index 04756fd135dd93..7f907d3f1f2464 100644 --- a/frontend/src/scenes/trends/mathsLogic.tsx +++ b/frontend/src/scenes/trends/mathsLogic.tsx @@ -145,12 +145,29 @@ export const BASE_MATH_DEFINITIONS: Record = { shortName: 'first time', description: ( <> - Only count events if users do it for the first time. + Only the first time the user performed this event will count, and only if it matches the event filters.

- Example: If a single user performs an event for the first time ever within a given period, it counts - as 1. Subsequent events by the same user will not be counted. + Example: If we are looking for pageview events to posthog.com/about, but the user's first + pageview was on posthog.com, it will not match, even if they went to posthog.com/about afterwards. + + ), + category: MathCategory.EventCount, + }, + [BaseMathType.FirstMatchingEventForUser]: { + name: 'First matching event for user', + shortName: 'first matching event', + description: ( + <> + The first time the user performed this event that matches the event filters will count. +
+
+ + Example: If we are looking for pageview events to posthog.com/about, and the user's first + pageview was on posthog.com but then they navigated to posthog.com/about, it will match the pageview + event from posthog.com/about ), diff --git a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx index ee32168dd78178..b04d7cdaa8953f 100644 --- a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx +++ b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx @@ -16,6 +16,7 @@ export function WebAnalyticsRecordingsTile({ tile }: { tile: ReplayTile }): JSX. const { layout } = tile const { replayFilters, webAnalyticsFilters } = useValues(webAnalyticsLogic) const { currentTeam } = useValues(teamLogic) + const sessionRecordingsListLogicInstance = sessionRecordingsPlaylistLogic({ logicKey: 'webAnalytics', filters: replayFilters, diff --git a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx index 63ef78e423bcea..cda07bc69ee55d 100644 --- a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx +++ b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx @@ -321,6 +321,11 @@ export const webAnalyticsDataTableQueryContext: QueryContext = { render: VariationCell(), align: 'right', }, + unique_conversions: { + title: Unique Conversions, + render: VariationCell(), + align: 'right', + }, conversion_rate: { title: Conversion Rate, render: VariationCell({ isPercentage: true }), diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx index cec7aad2059eec..a535b5b54ed766 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx @@ -45,6 +45,7 @@ import { PropertyOperator, RecordingUniversalFilters, RetentionPeriod, + UniversalFiltersGroupValue, } from '~/types' import type { webAnalyticsLogicType } from './webAnalyticsLogicType' @@ -622,6 +623,9 @@ export const webAnalyticsLogic = kea([ }, compareFilter: compareFilter || { compare: false }, filterTestAccounts, + conversionGoal: featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS] + ? conversionGoal + : undefined, properties: webAnalyticsFilters, }, hidePersonsModal: true, @@ -662,6 +666,9 @@ export const webAnalyticsLogic = kea([ compareFilter, limit: 10, filterTestAccounts, + conversionGoal: featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS] + ? conversionGoal + : undefined, ...(source || {}), }, embedded: false, @@ -783,14 +790,21 @@ export const webAnalyticsLogic = kea([ accessed in your application, regardless of when they were accessed through the lifetime of a user session.

-

- The{' '} - - bounce rate - {' '} - indicates the percentage of users who left your page immediately - after visiting without capturing any event. -

+ {conversionGoal ? ( +

+ The conversion rate is the percentage of users who completed + the conversion goal in this specific path. +

+ ) : ( +

+ The{' '} + + bounce rate + {' '} + indicates the percentage of users who left your page + immediately after visiting without capturing any event. +

+ )}
), }, @@ -814,8 +828,17 @@ export const webAnalyticsLogic = kea([ title: 'Entry Path', description: (
- Entry paths are the paths a user session started, i.e. the first - path they saw when they opened your website. +

+ Entry paths are the paths where a user session started, i.e. the first + path the user saw when they opened your website. +

+ {conversionGoal && ( +

+ The conversion rate is the percentage of users who completed + the conversion goal after starting their session + on this path. +

+ )}
), }, @@ -862,6 +885,11 @@ export const webAnalyticsLogic = kea([ sampling, limit: 10, filterTestAccounts, + conversionGoal: featureFlags[ + FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS + ] + ? conversionGoal + : undefined, stripQueryParams: shouldStripQueryParams, }, embedded: false, @@ -1136,6 +1164,11 @@ export const webAnalyticsLogic = kea([ trendsFilter: { display: ChartDisplayType.WorldMap, }, + conversionGoal: featureFlags[ + FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS + ] + ? conversionGoal + : undefined, filterTestAccounts, properties: webAnalyticsFilters, }, @@ -1183,63 +1216,66 @@ export const webAnalyticsLogic = kea([ ], } : null, - { - kind: 'query', - tileId: TileId.RETENTION, - title: 'Retention', - layout: { - colSpanClassName: 'md:col-span-2', - }, - query: { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.RetentionQuery, - properties: webAnalyticsFilters, - dateRange, - filterTestAccounts, - retentionFilter: { - retentionType: RETENTION_FIRST_TIME, - retentionReference: 'total', - totalIntervals: isGreaterThanMd ? 8 : 5, - period: RetentionPeriod.Week, - }, - }, - vizSpecificOptions: { - [InsightType.RETENTION]: { - hideLineGraph: true, - hideSizeColumn: !isGreaterThanMd, - useSmallLayout: !isGreaterThanMd, - }, - }, - embedded: true, - }, - insightProps: createInsightProps(TileId.RETENTION), - canOpenInsight: false, - canOpenModal: true, - docs: { - url: 'https://posthog.com/docs/web-analytics/dashboard#retention', - title: 'Retention', - description: ( - <> -
-

- Retention creates a cohort of unique users who performed any event for the - first time in the last week. It then tracks the percentage of users who - return to perform any event in the following weeks. -

-

- You want the numbers numbers to be the highest possible, suggesting that - people that come to your page continue coming to your page - and performing - an actions. Also, the further down the table the higher the numbers should - be (or at least as high), which would indicate that you're either increasing - or keeping your retention at the same level. -

-
- - ), - }, - }, - featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOALS] + !conversionGoal + ? { + kind: 'query', + tileId: TileId.RETENTION, + title: 'Retention', + layout: { + colSpanClassName: 'md:col-span-2', + }, + query: { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.RetentionQuery, + properties: webAnalyticsFilters, + dateRange, + filterTestAccounts, + retentionFilter: { + retentionType: RETENTION_FIRST_TIME, + retentionReference: 'total', + totalIntervals: isGreaterThanMd ? 8 : 5, + period: RetentionPeriod.Week, + }, + }, + vizSpecificOptions: { + [InsightType.RETENTION]: { + hideLineGraph: true, + hideSizeColumn: !isGreaterThanMd, + useSmallLayout: !isGreaterThanMd, + }, + }, + embedded: true, + }, + insightProps: createInsightProps(TileId.RETENTION), + canOpenInsight: false, + canOpenModal: true, + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#retention', + title: 'Retention', + description: ( + <> +
+

+ Retention creates a cohort of unique users who performed any event for + the first time in the last week. It then tracks the percentage of + users who return to perform any event in the following weeks. +

+

+ You want the numbers to be as high as possible, suggesting + that people who come to your page keep coming back and + performing actions. Also, the further down the table the higher the + numbers should be (or at least as high), which would indicate that + you're either increasing your retention or keeping it at the same level. +

+
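To make the cohort description above concrete, a tiny worked example (numbers are invented for illustration):

```ts
// Illustrative numbers only: how one cell of the retention table is derived.
const cohortSize = 200 // unique users whose first-ever event happened last week
const returnedWeek1 = 80 // of those, users who performed any event this week
const week1Retention = (returnedWeek1 / cohortSize) * 100 // => 40 (%)
console.log(`Week 1 retention: ${week1Retention}%`)
```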
+ + ), + }, + } + : null, + // Hiding if conversionGoal is set already because values aren't representative + !conversionGoal && featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOALS] ? { kind: 'query', tileId: TileId.GOALS, @@ -1290,7 +1326,7 @@ export const webAnalyticsLogic = kea([ kind: 'replay', tileId: TileId.REPLAY, layout: { - colSpanClassName: 'md:col-span-1', + colSpanClassName: conversionGoal ? 'md:col-span-full' : 'md:col-span-1', }, docs: { url: 'https://posthog.com/docs/session-replay', @@ -1300,7 +1336,7 @@ export const webAnalyticsLogic = kea([ }, } : null, - featureFlags[FEATURE_FLAGS.ERROR_TRACKING] + !conversionGoal && featureFlags[FEATURE_FLAGS.ERROR_TRACKING] ? { kind: 'error_tracking', tileId: TileId.ERROR_TRACKING, @@ -1433,12 +1469,31 @@ export const webAnalyticsLogic = kea([ }, ], replayFilters: [ - (s) => [s.webAnalyticsFilters, s.dateFilter, s.shouldFilterTestAccounts], + (s) => [s.webAnalyticsFilters, s.dateFilter, s.shouldFilterTestAccounts, s.conversionGoal, s.featureFlags], ( webAnalyticsFilters: WebAnalyticsPropertyFilters, dateFilter, - shouldFilterTestAccounts + shouldFilterTestAccounts, + conversionGoal, + featureFlags ): RecordingUniversalFilters => { + const filters: UniversalFiltersGroupValue[] = [...webAnalyticsFilters] + if (conversionGoal && featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS]) { + if ('actionId' in conversionGoal) { + filters.push({ + id: conversionGoal.actionId, + name: String(conversionGoal.actionId), + type: 'actions', + }) + } else if ('customEventName' in conversionGoal) { + filters.push({ + id: conversionGoal.customEventName, + name: conversionGoal.customEventName, + type: 'events', + }) + } + } + return { filter_test_accounts: shouldFilterTestAccounts, @@ -1449,7 +1504,7 @@ export const webAnalyticsLogic = kea([ values: [ { type: FilterLogicalOperator.And, - values: webAnalyticsFilters || [], + values: filters, }, ], }, diff --git a/frontend/src/styles/global.scss b/frontend/src/styles/global.scss index 3d59bb5f18d715..0114e54cf72c2f 100644 --- a/frontend/src/styles/global.scss +++ b/frontend/src/styles/global.scss @@ -207,6 +207,7 @@ Only 400 (`normal`), 500 (`var(--font-medium)`), 600 (`var(--font-semibold)`), o --content-link: var(--brand-500); --content-link-hover: var(--brand-400); --content-link-pressed: var(--brand-600); + --content-warning: var(--orange-400); --content-warning-bold: var(--orange-700); --content-danger: var(--red-500); --content-danger-bold: var(--red-600); @@ -577,6 +578,7 @@ body { &[theme='dark'] { // Semantic colors (Dark mode) WIP --content-primary: var(--neutral-cool-100); + --content-warning: var(--orange-300); --content-warning-bold: var(--orange-100); --content-danger-bold: var(--red-100); --content-success-bold: var(--green-100); diff --git a/frontend/src/toolbar/ToolbarApp.tsx b/frontend/src/toolbar/ToolbarApp.tsx index f623736bd9c2be..80e43b38868a27 100644 --- a/frontend/src/toolbar/ToolbarApp.tsx +++ b/frontend/src/toolbar/ToolbarApp.tsx @@ -40,9 +40,19 @@ export function ToolbarApp(props: ToolbarProps = {}): JSX.Element { } ) + // There's a small conflict between our toolbar and the Tanstack React Dev library + // because Tanstack is polluting the global event listeners with a mouse down listener + // which conflicts with our toolbar's internal mouse down listeners + // + // To workaround that we simply prevent the event from bubbling further than the toolbar + // See https://github.com/PostHog/posthog-js/issues/1425 + const onMouseDown = ({ nativeEvent: event }: 
React.MouseEvent): void => { + event.stopImmediatePropagation() + } + return ( <> - +
{didRender && (didLoadStyles || props.disableExternalStyles) ? : null} -
+

{transform.selector || 'Select element'}

diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index d98d05d8d61b51..514caa36357264 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -196,6 +196,7 @@ export enum ProductKey {
     PLATFORM_AND_SUPPORT = 'platform_and_support',
     TEAMS = 'teams',
     WEB_ANALYTICS = 'web_analytics',
+    ERROR_TRACKING = 'error_tracking',
 }

 type ProductKeyUnion = `${ProductKey}`
@@ -2994,6 +2995,19 @@ export interface FeatureFlagRollbackConditions {
     operator?: string
 }

+export enum FeatureFlagStatus {
+    ACTIVE = 'active',
+    INACTIVE = 'inactive',
+    STALE = 'stale',
+    DELETED = 'deleted',
+    UNKNOWN = 'unknown',
+}
+
+export interface FeatureFlagStatusResponse {
+    status: FeatureFlagStatus
+    reason: string
+}
+
 export interface CombinedFeatureFlagAndValueType {
     feature_flag: FeatureFlagType
     value: boolean | string
@@ -3613,6 +3627,7 @@ export enum BaseMathType {
     MonthlyActiveUsers = 'monthly_active',
     UniqueSessions = 'unique_session',
     FirstTimeForUser = 'first_time_for_user',
+    FirstMatchingEventForUser = 'first_matching_event_for_user',
 }

 export enum PropertyMathType {
@@ -4590,6 +4605,7 @@ export type HogFunctionInputSchemaType = {
     integration?: string
     integration_key?: string
     integration_field?: 'slack_channel'
+    requiredScopes?: string
 }

 export type HogFunctionInputType = {
@@ -4639,10 +4655,21 @@ export interface HogFunctionFiltersType {
     bytecode_error?: string
 }

+export interface HogFunctionMappingType {
+    inputs_schema?: HogFunctionInputSchemaType[]
+    inputs?: Record<string, HogFunctionInputType> | null
+    filters?: HogFunctionFiltersType | null
+}
+export interface HogFunctionMappingTemplateType extends HogFunctionMappingType {
+    name: string
+    include_by_default?: boolean
+}
+
 export type HogFunctionTypeType =
     | 'destination'
     | 'site_destination'
     | 'site_app'
+    | 'transformation'
     | 'email'
     | 'sms'
     | 'push'
@@ -4664,6 +4691,7 @@ export type HogFunctionType = {
     inputs_schema?: HogFunctionInputSchemaType[]
     inputs?: Record<string, HogFunctionInputType> | null
+    mappings?: HogFunctionMappingType[] | null
     masking?: HogFunctionMasking | null
     filters?: HogFunctionFiltersType | null
     template?: HogFunctionTemplateType
@@ -4681,7 +4709,7 @@ export type HogFunctionConfigurationType = Omit<
     sub_template_id?: HogFunctionSubTemplateIdType
 }

-export type HogFunctionSubTemplateType = Pick & {
+export type HogFunctionSubTemplateType = Pick & {
     id: HogFunctionSubTemplateIdType
     name: string
     description: string | null
@@ -4689,10 +4717,11 @@ export type HogFunctionSubTemplateType = Pick & {
     status: HogFunctionTemplateStatus
     sub_templates?: HogFunctionSubTemplateType[]
+    mapping_templates?: HogFunctionMappingTemplateType[]
 }

 export type HogFunctionIconResponse = {
diff --git a/funnel-udf/src/steps.rs b/funnel-udf/src/steps.rs
index 21d6fb84e62e87..e2ba45ae02cd08 100644
--- a/funnel-udf/src/steps.rs
+++ b/funnel-udf/src/steps.rs
@@ -217,7 +217,7 @@ impl AggregateFunnelRow {
                 }
             }
         } else {
-            let is_unmatched_step_attribution = self.breakdown_step.map(|breakdown_step| step == breakdown_step - 1).unwrap_or(false) && *prop_val != event.breakdown;
+            let is_unmatched_step_attribution = self.breakdown_step.map(|breakdown_step| step - 1 == breakdown_step).unwrap_or(false) && *prop_val != event.breakdown;
             let already_used_event = processing_multiple_events && vars.entered_timestamp[step - 1].uuids.contains(&event.uuid);
             if !is_unmatched_step_attribution && !already_used_event {
                 let new_entered_timestamp = |vars: &Vars| -> EnteredTimestamp {
diff --git a/funnel-udf/src/trends.rs b/funnel-udf/src/trends.rs
index 42356dc06d1aa2..737bb82f48b8c2 100644
--- a/funnel-udf/src/trends.rs
+++ b/funnel-udf/src/trends.rs
@@ -66,13 +66,15 @@ struct IntervalData {
     entered_timestamp: Vec<EnteredTimestamp>,
 }

+type ResultsMap = HashMap<u64, ResultStruct>;
+
 struct Vars {
     interval_start_to_entered_timestamps: HashMap<u64, IntervalData>,
+    results: ResultsMap,
 }

 struct AggregateFunnelRow {
     breakdown_step: Option<usize>,
-    results: HashMap<u64, ResultStruct>,
 }

 const DEFAULT_ENTERED_TIMESTAMP: EnteredTimestamp = EnteredTimestamp {
@@ -83,11 +85,9 @@ const DEFAULT_ENTERED_TIMESTAMP: EnteredTimestamp = EnteredTimestamp {

 pub fn process_line(line: &str) -> Value {
     let args = parse_args(line);
     let mut aggregate_funnel_row = AggregateFunnelRow {
-        results: HashMap::new(),
         breakdown_step: Option::None,
     };
-    aggregate_funnel_row.calculate_funnel_from_user_events(&args);
-    let result: Vec<ResultStruct> = aggregate_funnel_row.results.into_values().collect();
+    let result: Vec<ResultStruct> = aggregate_funnel_row.calculate_funnel_from_user_events(&args);
     json!({ "result": result })
 }
@@ -98,17 +98,21 @@ fn parse_args(line: &str) -> Args {

 impl AggregateFunnelRow {
     #[inline(always)]
-    fn calculate_funnel_from_user_events(&mut self, args: &Args) {
+    fn calculate_funnel_from_user_events(&mut self, args: &Args) -> Vec<ResultStruct> {
         if args.breakdown_attribution_type.starts_with("step_") {
             self.breakdown_step = args.breakdown_attribution_type[5..].parse::<usize>().ok()
         }

-        args.prop_vals.iter().for_each(|prop_val| self.loop_prop_val(args, prop_val));
+        args.prop_vals.iter().flat_map(|prop_val| {
+            let results_map = self.loop_prop_val(args, prop_val);
+            results_map.into_values().collect::<Vec<_>>()
+        }).collect()
     }

     #[inline(always)]
-    fn loop_prop_val(&mut self, args: &Args, prop_val: &PropVal) {
+    fn loop_prop_val(&mut self, args: &Args, prop_val: &PropVal) -> ResultsMap {
         let mut vars = Vars {
+            results: HashMap::new(),
             interval_start_to_entered_timestamps: HashMap::new(),
         };
@@ -139,11 +143,13 @@ impl AggregateFunnelRow {
         let fully_excluded = vars.interval_start_to_entered_timestamps.values().find(|interval_data| interval_data.max_step.excluded == Exclusion::Full);
         if fully_excluded.is_none() {
             for (interval_start, interval_data) in vars.interval_start_to_entered_timestamps.into_iter() {
-                if !self.results.contains_key(&interval_start) && interval_data.max_step.step >= args.from_step + 1 && interval_data.max_step.excluded != Exclusion::Partial {
-                    self.results.insert(interval_start, ResultStruct(interval_start, -1, prop_val.clone(), interval_data.max_step.event_uuid));
+                if !vars.results.contains_key(&interval_start) && interval_data.max_step.step >= args.from_step + 1 && interval_data.max_step.excluded != Exclusion::Partial {
+                    vars.results.insert(interval_start, ResultStruct(interval_start, -1, prop_val.clone(), interval_data.max_step.event_uuid));
                 }
             }
         }
+
+        vars.results
     }

     #[inline(always)]
@@ -163,8 +169,10 @@ impl AggregateFunnelRow {
                 *step
             }) as usize;

+        let is_unmatched_step_attribution = self.breakdown_step.map(|breakdown_step| step - 1 == breakdown_step).unwrap_or(false) && *prop_val != event.breakdown;
+
         if step == 1 {
-            if !self.results.contains_key(&event.interval_start) {
+            if !is_unmatched_step_attribution && !vars.results.contains_key(&event.interval_start) {
                 let entered_timestamp_one = EnteredTimestamp { timestamp: event.timestamp, excluded: false };
                 let interval = vars.interval_start_to_entered_timestamps.get_mut(&event.interval_start);
                 if interval.is_none() || interval.as_ref().map(|interval| interval.max_step.step == 1 && interval.max_step.excluded != Exclusion::Not).unwrap() {
@@ -201,7 +209,6 @@ impl AggregateFunnelRow {
                     }
                 }
             } else {
-                let is_unmatched_step_attribution = self.breakdown_step.map(|breakdown_step| step == breakdown_step - 1).unwrap_or(false) && *prop_val != event.breakdown;
                 if !is_unmatched_step_attribution {
                     if !previous_step_excluded {
                         interval_data.entered_timestamp[step] = EnteredTimestamp {
@@ -211,7 +218,7 @@ impl AggregateFunnelRow {
                     }
                     // check if we have hit the goal. if we have, remove it from the list and add it to the successful_timestamps
                     if interval_data.entered_timestamp[args.num_steps].timestamp != 0.0 {
-                        vars.results.insert(
+                        vars.results.insert(
                             interval_start,
                             ResultStruct(interval_start, 1, prop_val.clone(), event.uuid)
                         );
diff --git a/livestream/go.mod b/livestream/go.mod
index 0efed981a97d64..4f65785855d820 100644
--- a/livestream/go.mod
+++ b/livestream/go.mod
@@ -55,10 +55,10 @@ require (
 	github.com/valyala/bytebufferpool v1.0.0 // indirect
 	github.com/valyala/fasttemplate v1.2.2 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
-	golang.org/x/crypto v0.23.0 // indirect
+	golang.org/x/crypto v0.31.0 // indirect
 	golang.org/x/net v0.25.0 // indirect
-	golang.org/x/sys v0.20.0 // indirect
-	golang.org/x/text v0.15.0 // indirect
+	golang.org/x/sys v0.28.0 // indirect
+	golang.org/x/text v0.21.0 // indirect
 	golang.org/x/time v0.5.0 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
diff --git a/livestream/go.sum b/livestream/go.sum
index 4a523d7facffd0..f3065be13f8403 100644
--- a/livestream/go.sum
+++ b/livestream/go.sum
@@ -386,31 +386,31 @@ go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU=
 go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc=
 go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
 go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
-golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
-golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
+golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
+golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
 golang.org/x/exp v0.0.0-20240119083558-1b970713d09a h1:Q8/wZp0KX97QFTc2ywcOE0YRjZPVIx+MXInMzdvQqcA=
 golang.org/x/exp v0.0.0-20240119083558-1b970713d09a/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
-golang.org/x/mod v0.16.0 h1:QX4fJ0Rr5cPQCF7O9lh9Se4pmwfwskqZfq5moyldzic=
-golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
+golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
 golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
 golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
 golang.org/x/oauth2 v0.16.0 h1:aDkGMBSYxElaoP81NpoUoz2oo2R2wHdZpGToUxfyQrQ=
 golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o=
-golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
-golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
+golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
-golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw=
-golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
-golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
-golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
+golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
+golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
+golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
+golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
 golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
 golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
-golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc=
-golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps=
+golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
+golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
 google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
 google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
 google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAUOx8YmR5T7rc0rdzK8DyxM8cQ9zq0=
diff --git a/mypy-baseline.txt b/mypy-baseline.txt
index c4b70836c8354d..ca4b578d231b4e 100644
--- a/mypy-baseline.txt
+++ b/mypy-baseline.txt
@@ -1,4 +1,67 @@
 posthog/warehouse/models/ssh_tunnel.py:0: error: Incompatible types in assignment (expression has type "NoEncryption", variable has type "BestAvailableEncryption") [assignment]
+posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Statement is unreachable [unreachable]
+posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Non-overlapping equality check (left operand type: "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'json', 'decimal', 'wei', 'date', 'time'] | None", right operand type: "Literal['interval']") [comparison-overlap]
+posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index]
+posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index]
+posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, TColumnSchema]"; expected type "str" [index]
+posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "Incremental[Any] | None" has no attribute "row_order" [union-attr]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "Literal['asc', 'desc'] | Any | None", variable has type "Literal['asc', 'desc']") [assignment]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Column[Any]") [assignment]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Literal['asc', 'desc']") [assignment]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "dict[str, Any] | None" has no attribute "get" [union-attr]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Argument "primary_key" to "make_hints" has incompatible type "list[str] | None"; expected "str | Sequence[str] | Callable[[Any], str | Sequence[str]]" [arg-type]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; 
expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/utils.py:0: error: No overload variant of "asdict" matches argument type "type[DataclassInstance]" [call-overload] posthog/utils.py:0: note: Possible overload variants: posthog/utils.py:0: note: def asdict(obj: DataclassInstance) -> dict[str, Any] @@ -80,12 +143,11 @@ posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Item "No posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 1 to "float" has incompatible type "Any | None"; expected "str | Buffer | SupportsFloat | SupportsIndex" [arg-type] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 1 to "clean_display" has incompatible type "Any | None"; expected "str" [arg-type] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: 
Dict entry 0 has incompatible type "str": "FunnelsFilter"; expected "str": "TrendsFilter" [dict-item] -posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 0 has incompatible type "str": "RetentionFilter"; expected "str": "TrendsFilter" [dict-item] -posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 1 to "to_base_entity_dict" has incompatible type "Any | None"; expected "dict[Any, Any]" [arg-type] -posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Argument 1 to "to_base_entity_dict" has incompatible type "Any | None"; expected "dict[Any, Any]" [arg-type] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 0 has incompatible type "str": "PathsFilter"; expected "str": "TrendsFilter" [dict-item] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 0 has incompatible type "str": "LifecycleFilter"; expected "str": "TrendsFilter" [dict-item] posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 0 has incompatible type "str": "StickinessFilter"; expected "str": "TrendsFilter" [dict-item] +posthog/warehouse/models/external_data_schema.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "int | float") [assignment] +posthog/warehouse/models/external_data_schema.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "int | float") [assignment] posthog/session_recordings/models/session_recording.py:0: error: Argument "distinct_id" to "MissingPerson" has incompatible type "str | None"; expected "str" [arg-type] posthog/session_recordings/models/session_recording.py:0: error: Incompatible type for lookup 'persondistinctid__team_id': (got "Team", expected "str | int") [misc] posthog/models/hog_functions/hog_function.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] @@ -219,10 +281,6 @@ posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] posthog/demo/matrix/matrix.py:0: error: Name "timezone.datetime" is not defined [name-defined] posthog/api/shared.py:0: error: Incompatible return value type (got "int | None", expected "Level | None") [return-value] -ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] -ee/billing/quota_limiting.py:0: error: "object" has no attribute "get" [attr-defined] -ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] -ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] posthog/test/base.py:0: error: Module has no attribute "configure" [attr-defined] posthog/test/base.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Organization") [assignment] posthog/test/base.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Project") [assignment] @@ -248,8 +306,6 @@ ee/tasks/subscriptions/email_subscriptions.py:0: error: Item "None" of "User | N ee/tasks/subscriptions/email_subscriptions.py:0: error: Item "None" of "datetime | None" has no attribute "isoformat" [union-attr] ee/tasks/subscriptions/email_subscriptions.py:0: error: Item "None" of "datetime | None" has no attribute "strftime" [union-attr] ee/tasks/subscriptions/email_subscriptions.py:0: 
error: Item "None" of "User | None" has no attribute "first_name" [union-attr] -ee/billing/billing_manager.py:0: error: Module has no attribute "utc" [attr-defined] -ee/billing/billing_manager.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "bool | Combinable | None") [assignment] posthog/models/property/util.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/models/property/util.py:0: error: Argument 3 to "format_filter_query" has incompatible type "HogQLContext | None"; expected "HogQLContext" [arg-type] posthog/models/property/util.py:0: error: Argument 3 to "format_cohort_subquery" has incompatible type "HogQLContext | None"; expected "HogQLContext" [arg-type] @@ -268,15 +324,21 @@ posthog/hogql/property.py:0: error: Incompatible type for lookup 'id': (got "str posthog/hogql/property.py:0: error: Incompatible type for lookup 'pk': (got "str | float", expected "str | int") [misc] posthog/api/utils.py:0: error: Incompatible types in assignment (expression has type "type[EventDefinition]", variable has type "type[EnterpriseEventDefinition]") [assignment] posthog/api/utils.py:0: error: Argument 1 to "UUID" has incompatible type "int | str"; expected "str | None" [arg-type] +ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] +ee/billing/quota_limiting.py:0: error: "object" has no attribute "get" [attr-defined] +ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] +ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index] posthog/hogql/filters.py:0: error: Incompatible default for argument "team" (default has type "None", argument has type "Team") [assignment] posthog/hogql/filters.py:0: note: PEP 484 prohibits implicit Optional. 
Accordingly, mypy has changed its default to no_implicit_optional=True posthog/hogql/filters.py:0: note: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase -posthog/api/capture.py:0: error: Module has no attribute "utc" [attr-defined] +ee/billing/billing_manager.py:0: error: Module has no attribute "utc" [attr-defined] +ee/billing/billing_manager.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "bool | Combinable | None") [assignment] posthog/hogql/query.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "str | SelectQuery | SelectSetQuery") [assignment] posthog/hogql/query.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectSetQuery") [assignment] posthog/hogql/query.py:0: error: Argument 1 to "get_default_limit_for_context" has incompatible type "LimitContext | None"; expected "LimitContext" [arg-type] posthog/hogql/query.py:0: error: Subclass of "SelectQuery" and "SelectSetQuery" cannot exist: would have incompatible method signatures [unreachable] posthog/api/organization.py:0: error: Incompatible return value type (got "int | None", expected "Level | None") [return-value] +posthog/api/capture.py:0: error: Module has no attribute "utc" [attr-defined] posthog/queries/person_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/queries/event_query/event_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/hogql_queries/sessions_timeline_query_runner.py:0: error: Statement is unreachable [unreachable] @@ -367,9 +429,40 @@ posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "delete" [union-attr] -posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Statement is unreachable [unreachable] -posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Non-overlapping equality check (left operand type: "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'json', 'decimal', 'wei', 'date', 'time'] | None", right operand type: "Literal['interval']") [comparison-overlap] -posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: 
error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "with_only_columns" of "Select" matches argument type "ReadOnlyColumnCollection[str, Column[Any]]" [call-overload] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants: +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], /) -> Select[tuple[_T0]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], /) -> Select[tuple[_T0, _T1]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], /) -> Select[tuple[_T0, _T1, _T2]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], /) -> Select[tuple[_T0, _T1, _T2, _T3]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | 
SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5, _T6] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], TypedColumnsClauseRole[_T7] | SQLCoreOperations[_T7] | type[_T7], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def with_only_columns(self, *entities: TypedColumnsClauseRole[Any] | ColumnsClauseRole | SQLCoreOperations[Any] | Literal['*', 1] | type[Any] | Inspectable[_HasClauseElement[Any]] | _HasClauseElement[Any], maintain_column_froms: bool = ..., **Any) -> Select[Any] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "resource" matches argument types "Callable[[Engine, Table, int, Literal['sqlalchemy', 'pyarrow', 'pandas', 'connectorx'], Incremental[Any] | None, bool, Callable[[Table], None] | None, Literal['minimal', 'full', 'full_with_precision'], dict[str, Any] | None, Callable[[TypeEngine[Any]], TypeEngine[Any] | type[TypeEngine[Any]] | None] | None, list[str] | None, Callable[[Select[Any], Table], Select[Any]] | None, list[str] | None], Iterator[Any]]", "str", "list[str] | None", "list[str] | None", "dict[str, TColumnSchema]", "Collection[str]", "str" [call-overload] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants: +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TResourceFunParams`-1, TDltResourceImpl: DltResource] resource(Callable[TResourceFunParams, Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | 
Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) -> TDltResourceImpl +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) 
-> Callable[[Callable[TResourceFunParams, Any]], TDltResourceImpl] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str | Callable[[Any], str] = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ..., standalone: Literal[True] = ...) -> Callable[[Callable[TResourceFunParams, Any]], Callable[TResourceFunParams, TDltResourceImpl]] +posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(list[Any] | tuple[Any] | Iterator[Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) 
-> TDltResourceImpl posthog/tasks/test/test_update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] posthog/tasks/test/test_stop_surveys_reached_target.py:0: error: No overload variant of "__sub__" of "datetime" matches argument type "None" [operator] posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Possible overload variants: @@ -396,6 +489,10 @@ posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "memoryvi posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "None" of "bytes | memoryview | None" has no attribute "decode" [union-attr] posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "memoryview" of "bytes | memoryview | None" has no attribute "decode" [union-attr] posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "None" of "bytes | memoryview | None" has no attribute "decode" [union-attr] +posthog/management/commands/warehouse_last_incremental_value_sync.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/management/commands/warehouse_last_incremental_value_sync.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/management/commands/warehouse_last_incremental_value_sync.py:0: error: Argument 3 to "get_value" of "ConfigProvider" has incompatible type "str | None"; expected "str" [arg-type] +posthog/management/commands/warehouse_last_incremental_value_sync.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument 4 to "create_person_override" has incompatible type "int | None"; expected "int" [arg-type] posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument "group_type_index" to "raw_create_group_ch" has incompatible type "int"; expected "Literal[0, 1, 2, 3, 4]" [arg-type] posthog/management/commands/migrate_team.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "BatchExport") [assignment] @@ -503,10 +600,12 @@ posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in a posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "str | int | Combinable") [assignment] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "dict[str, dict[str, str | bool]] | dict[str, str]", variable has type "dict[str, dict[str, str]]") [assignment] posthog/warehouse/data_load/source_templates.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "Type") [assignment] -posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index] -posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index] -posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, TColumnSchema]"; expected type "str" [index] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" 
[call-overload] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: Possible overload variants: +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, /) -> Sequence[str] | None +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: Argument "source_id" to "sync_old_schemas_with_new_schemas" has incompatible type "str"; expected "UUID" [arg-type] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a return type annotation [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] @@ -668,6 +767,16 @@ ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseTrendExper ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseFunnelExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseSecondaryExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] ee/clickhouse/views/experiments.py:0: error: Item "None" of "User | None" has no attribute "email" [union-attr] +posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value] +posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type] +posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload] +posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants: +posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None +posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]] +posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T +posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] @@ -676,22 +785,6 @@ posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict k posthog/temporal/data_modeling/run_workflow.py:0: error: Dict entry 20 has incompatible type "str": "Literal['complex']"; expected "str": "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'binary', 'json', 'decimal', 'wei', 'date', 'time']" [dict-item] 
posthog/temporal/data_modeling/run_workflow.py:0: error: Dict entry 21 has incompatible type "str": "Literal['complex']"; expected "str": "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'binary', 'json', 'decimal', 'wei', 'date', 'time']" [dict-item] posthog/temporal/data_modeling/run_workflow.py:0: error: Dict entry 22 has incompatible type "str": "Literal['complex']"; expected "str": "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'binary', 'json', 'decimal', 'wei', 'date', 'time']" [dict-item] -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload] -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: Possible overload variants: -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, /) -> Sequence[str] | None -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str] -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T -posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: Argument "source_id" to "sync_old_schemas_with_new_schemas" has incompatible type "str"; expected "UUID" [arg-type] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "Incremental[Any] | None" has no attribute "row_order" [union-attr] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "Literal['asc', 'desc'] | Any | None", variable has type "Literal['asc', 'desc']") [assignment] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Column[Any]") [assignment] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Literal['asc', 'desc']") [assignment] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "dict[str, Any] | None" has no attribute "get" [union-attr] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Argument "primary_key" to "make_hints" has incompatible type "list[str] | None"; expected "str | Sequence[str] | Callable[[Any], str | Sequence[str]]" [arg-type] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: "FilesystemDestinationClientConfiguration" has no attribute "delta_jobs_per_write" [attr-defined] posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: "type[FilesystemDestinationClientConfiguration]" has no attribute "delta_jobs_per_write" [attr-defined] posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: Incompatible types in assignment (expression has type "object", variable has type 
"DataWarehouseCredential | Combinable | None") [assignment] @@ -724,23 +817,6 @@ posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_execute_async_calls" (hint: "_execute_async_calls: list[] = ...") [var-annotated] posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_cursors" (hint: "_cursors: list[] = ...") [var-annotated] posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: List item 0 has incompatible type "tuple[str, str, int, int, int, int, str, int]"; expected "tuple[str, str, int, int, str, str, str, str]" [list-item] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "with_only_columns" of "Select" matches argument type "ReadOnlyColumnCollection[str, Column[Any]]" [call-overload] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants: -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], /) -> Select[tuple[_T0]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], /) -> Select[tuple[_T0, _T1]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], /) -> Select[tuple[_T0, _T1, _T2]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], /) -> Select[tuple[_T0, _T1, _T2, _T3]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, 
_T4, _T5, _T6] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], TypedColumnsClauseRole[_T7] | SQLCoreOperations[_T7] | type[_T7], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def with_only_columns(self, *entities: TypedColumnsClauseRole[Any] | ColumnsClauseRole | SQLCoreOperations[Any] | Literal['*', 1] | type[Any] | Inspectable[_HasClauseElement[Any]] | _HasClauseElement[Any], maintain_column_froms: bool = ..., **Any) -> Select[Any] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "resource" matches argument types "Callable[[Engine, Table, int, Literal['sqlalchemy', 'pyarrow', 'pandas', 'connectorx'], Incremental[Any] | None, bool, Callable[[Table], None] | None, Literal['minimal', 'full', 'full_with_precision'], dict[str, Any] | None, Callable[[TypeEngine[Any]], TypeEngine[Any] | type[TypeEngine[Any]] | None] | None, list[str] | None, Callable[[Select[Any], Table], Select[Any]] | None, list[str] | None], Iterator[Any]]", "str", "list[str] | None", "list[str] | None", "dict[str, TColumnSchema]", "Collection[str]", "str" [call-overload] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants: -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TResourceFunParams`-1, TDltResourceImpl: DltResource] resource(Callable[TResourceFunParams, Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 
'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) -> TDltResourceImpl -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) 
-> Callable[[Callable[TResourceFunParams, Any]], TDltResourceImpl] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str | Callable[[Any], str] = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ..., standalone: Literal[True] = ...) -> Callable[[Callable[TResourceFunParams, Any]], Callable[TResourceFunParams, TDltResourceImpl]] -posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(list[Any] | tuple[Any] | Iterator[Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) 
-> TDltResourceImpl posthog/migrations/0237_remove_timezone_from_teams.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] posthog/migrations/0228_fix_tile_layouts.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] @@ -748,79 +824,22 @@ posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does no posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types in assignment (expression has type "str | int", variable has type "int") [assignment] +posthog/temporal/data_imports/external_data_job.py:0: error: Argument "status" to "update_external_job_status" has incompatible type "str"; expected "Status" [arg-type] posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | 
None" is not indexable [index] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] posthog/api/test/batch_exports/conftest.py:0: error: Signature of "run" incompatible with supertype "Worker" [override] posthog/api/test/batch_exports/conftest.py:0: note: Superclass: posthog/api/test/batch_exports/conftest.py:0: note: def run(self) -> Coroutine[Any, Any, None] posthog/api/test/batch_exports/conftest.py:0: note: Subclass: posthog/api/test/batch_exports/conftest.py:0: note: def run(self, loop: Any) -> Any posthog/api/test/batch_exports/conftest.py:0: error: Argument "activities" to "ThreadedWorker" has incompatible type "list[function]"; expected "Sequence[Callable[..., Any]]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 
4 to "source" has incompatible type "int | None"; expected "int" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/test/test_middleware.py:0: error: Incompatible types in assignment (expression has type "_MonkeyPatchedWSGIResponse", variable has type "_MonkeyPatchedResponse") [assignment] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] 
-posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible return value type (got "dict[str, Collection[str]]", expected "dict[str, str]") [return-value] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "dict[str, str]") [assignment] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Unpacked dict entry 1 has incompatible type "str"; expected "SupportsKeysAndGetItem[str, str]" [dict-item] @@ -862,22 +881,6 @@ posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExpo posthog/api/test/batch_exports/test_update.py:0: error: Value of 
type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_pause.py:0: error: "batch_export_delete_schedule" does not return a value (it only ever returns None) [func-returns-value] -posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value] -posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type] -posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload] -posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants: -posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None -posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]] -posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T -posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/external_data_job.py:0: error: Argument "status" to "update_external_job_status" has incompatible type "str"; expected "Status" [arg-type] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/api/query.py:0: error: Statement is unreachable [unreachable] posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable] posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value] diff --git a/package.json b/package.json index f43b2dd2ae1e11..14bf97b7876c02 100644 --- a/package.json +++ b/package.json @@ -161,7 +161,7 @@ "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.194.6", + "posthog-js": "1.200.2", "posthog-js-lite": "3.0.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", @@ -265,6 +265,7 @@ "axe-core": "^4.4.3", "babel-loader": "^8.0.6", "babel-plugin-import": "^1.13.0", + "caniuse-lite": "^1.0.30001687", "concurrently": "^5.3.0", "css-loader": "^3.4.2", "cypress": "^13.11.0", @@ -376,7 +377,7 @@ "last 2 edge versions" ], "production": [ - "defaults and not not op_mini all" + "defaults and not op_mini all" ] }, "browser": { diff --git a/plugin-server/package.json b/plugin-server/package.json index 
3699ce1bbc769a..1d46f73ad6c748 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -42,8 +42,8 @@ "repository": "https://github.com/PostHog/posthog-plugin-server", "license": "MIT", "dependencies": { - "@aws-sdk/client-s3": "^3.315.0", - "@aws-sdk/lib-storage": "^3.315.0", + "@aws-sdk/client-s3": "^3.709.0", + "@aws-sdk/lib-storage": "^3.709.0", "@babel/core": "^7.18.10", "@babel/plugin-transform-react-jsx": "^7.18.10", "@babel/preset-env": "^7.18.10", diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml index e49da8ba134910..685a4b68c53148 100644 --- a/plugin-server/pnpm-lock.yaml +++ b/plugin-server/pnpm-lock.yaml @@ -11,11 +11,11 @@ patchedDependencies: dependencies: '@aws-sdk/client-s3': - specifier: ^3.315.0 - version: 3.319.0 + specifier: ^3.709.0 + version: 3.709.0 '@aws-sdk/lib-storage': - specifier: ^3.315.0 - version: 3.319.0(@aws-sdk/abort-controller@3.374.0)(@aws-sdk/client-s3@3.319.0) + specifier: ^3.709.0 + version: 3.709.0(@aws-sdk/client-s3@3.709.0) '@babel/core': specifier: ^7.18.10 version: 7.21.4 @@ -355,979 +355,651 @@ packages: '@jridgewell/gen-mapping': 0.3.3 '@jridgewell/trace-mapping': 0.3.18 - /@aws-crypto/crc32@3.0.0: - resolution: {integrity: sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA==} + /@aws-crypto/crc32@5.2.0: + resolution: {integrity: sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.310.0 - tslib: 1.14.1 - dev: false - - /@aws-crypto/crc32c@3.0.0: - resolution: {integrity: sha512-ENNPPManmnVJ4BTXlOjAgD7URidbAznURqD0KvfREyc4o20DPYdEldU1f5cQ7Jbj0CJJSPaMIk/9ZshdB3210w==} - dependencies: - '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.310.0 - tslib: 1.14.1 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.709.0 + tslib: 2.6.2 dev: false - /@aws-crypto/ie11-detection@3.0.0: - resolution: {integrity: sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==} + /@aws-crypto/crc32c@5.2.0: + resolution: {integrity: sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag==} dependencies: - tslib: 1.14.1 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.709.0 + tslib: 2.6.2 dev: false - /@aws-crypto/sha1-browser@3.0.0: - resolution: {integrity: sha512-NJth5c997GLHs6nOYTzFKTbYdMNA6/1XlKVgnZoaZcQ7z7UJlOgj2JdbHE8tiYLS3fzXNCguct77SPGat2raSw==} + /@aws-crypto/sha1-browser@5.2.0: + resolution: {integrity: sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg==} dependencies: - '@aws-crypto/ie11-detection': 3.0.0 - '@aws-crypto/supports-web-crypto': 3.0.0 - '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.310.0 + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.709.0 '@aws-sdk/util-locate-window': 3.310.0 - '@aws-sdk/util-utf8-browser': 3.259.0 - tslib: 1.14.1 + '@smithy/util-utf8': 2.3.0 + tslib: 2.6.2 dev: false - /@aws-crypto/sha256-browser@3.0.0: - resolution: {integrity: sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ==} + /@aws-crypto/sha256-browser@5.2.0: + resolution: {integrity: sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==} dependencies: - '@aws-crypto/ie11-detection': 3.0.0 - '@aws-crypto/sha256-js': 3.0.0 - '@aws-crypto/supports-web-crypto': 3.0.0 - '@aws-crypto/util': 3.0.0 - 
'@aws-sdk/types': 3.310.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.709.0 '@aws-sdk/util-locate-window': 3.310.0 - '@aws-sdk/util-utf8-browser': 3.259.0 - tslib: 1.14.1 - dev: false - - /@aws-crypto/sha256-js@3.0.0: - resolution: {integrity: sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==} - dependencies: - '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.310.0 - tslib: 1.14.1 - dev: false - - /@aws-crypto/supports-web-crypto@3.0.0: - resolution: {integrity: sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==} - dependencies: - tslib: 1.14.1 - dev: false - - /@aws-crypto/util@3.0.0: - resolution: {integrity: sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==} - dependencies: - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-utf8-browser': 3.259.0 - tslib: 1.14.1 + '@smithy/util-utf8': 2.3.0 + tslib: 2.6.2 dev: false - /@aws-sdk/abort-controller@3.310.0: - resolution: {integrity: sha512-v1zrRQxDLA1MdPim159Vx/CPHqsB4uybSxRi1CnfHO5ZjHryx3a5htW2gdGAykVCul40+yJXvfpufMrELVxH+g==} - engines: {node: '>=14.0.0'} + /@aws-crypto/sha256-js@5.2.0: + resolution: {integrity: sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.709.0 + tslib: 2.6.2 dev: false - /@aws-sdk/abort-controller@3.374.0: - resolution: {integrity: sha512-pO1pqFBdIF28ZvnJmg58Erj35RLzXsTrjvHghdc/xgtSvodFFCNrUsPg6AP3On8eiw9elpHoS4P8jMx1pHDXEw==} - engines: {node: '>=14.0.0'} - deprecated: This package has moved to @smithy/abort-controller + /@aws-crypto/supports-web-crypto@5.2.0: + resolution: {integrity: sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==} dependencies: - '@smithy/abort-controller': 1.1.0 tslib: 2.6.2 dev: false - /@aws-sdk/chunked-blob-reader@3.310.0: - resolution: {integrity: sha512-CrJS3exo4mWaLnWxfCH+w88Ou0IcAZSIkk4QbmxiHl/5Dq705OLoxf4385MVyExpqpeVJYOYQ2WaD8i/pQZ2fg==} + /@aws-crypto/util@5.2.0: + resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} dependencies: - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@smithy/util-utf8': 2.3.0 + tslib: 2.6.2 dev: false - /@aws-sdk/client-s3@3.319.0: - resolution: {integrity: sha512-/XzElEO4iZTBgvrcWq20sxKLvhRetjT1gOPRF4Ra2iSCbeVIT/feYdEaSSgMsaiqrREywBc+59NiOyxImWTaOA==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-crypto/sha1-browser': 3.0.0 - '@aws-crypto/sha256-browser': 3.0.0 - '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sts': 3.319.0 - '@aws-sdk/config-resolver': 3.310.0 - '@aws-sdk/credential-provider-node': 3.319.0 - '@aws-sdk/eventstream-serde-browser': 3.310.0 - '@aws-sdk/eventstream-serde-config-resolver': 3.310.0 - '@aws-sdk/eventstream-serde-node': 3.310.0 - '@aws-sdk/fetch-http-handler': 3.310.0 - '@aws-sdk/hash-blob-browser': 3.310.0 - '@aws-sdk/hash-node': 3.310.0 - '@aws-sdk/hash-stream-node': 3.310.0 - '@aws-sdk/invalid-dependency': 3.310.0 - '@aws-sdk/md5-js': 3.310.0 - '@aws-sdk/middleware-bucket-endpoint': 3.310.0 - '@aws-sdk/middleware-content-length': 3.310.0 - '@aws-sdk/middleware-endpoint': 3.310.0 - '@aws-sdk/middleware-expect-continue': 3.310.0 - '@aws-sdk/middleware-flexible-checksums': 3.310.0 - '@aws-sdk/middleware-host-header': 
3.310.0 - '@aws-sdk/middleware-location-constraint': 3.310.0 - '@aws-sdk/middleware-logger': 3.310.0 - '@aws-sdk/middleware-recursion-detection': 3.310.0 - '@aws-sdk/middleware-retry': 3.310.0 - '@aws-sdk/middleware-sdk-s3': 3.310.0 - '@aws-sdk/middleware-serde': 3.310.0 - '@aws-sdk/middleware-signing': 3.310.0 - '@aws-sdk/middleware-ssec': 3.310.0 - '@aws-sdk/middleware-stack': 3.310.0 - '@aws-sdk/middleware-user-agent': 3.319.0 - '@aws-sdk/node-config-provider': 3.310.0 - '@aws-sdk/node-http-handler': 3.310.0 - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/signature-v4-multi-region': 3.310.0 - '@aws-sdk/smithy-client': 3.316.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/url-parser': 3.310.0 - '@aws-sdk/util-base64': 3.310.0 - '@aws-sdk/util-body-length-browser': 3.310.0 - '@aws-sdk/util-body-length-node': 3.310.0 - '@aws-sdk/util-defaults-mode-browser': 3.316.0 - '@aws-sdk/util-defaults-mode-node': 3.316.0 - '@aws-sdk/util-endpoints': 3.319.0 - '@aws-sdk/util-retry': 3.310.0 - '@aws-sdk/util-stream-browser': 3.310.0 - '@aws-sdk/util-stream-node': 3.310.0 - '@aws-sdk/util-user-agent-browser': 3.310.0 - '@aws-sdk/util-user-agent-node': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - '@aws-sdk/util-waiter': 3.310.0 - '@aws-sdk/xml-builder': 3.310.0 - fast-xml-parser: 4.1.2 - tslib: 2.5.0 + /@aws-sdk/client-s3@3.709.0: + resolution: {integrity: sha512-IvC7coELoQ4YenTdULArVdL5yk6jNRVUALX1aqv9JlPdrXxb3Om6YrM9e7AlSTLxrULTsAe1ubm8i/DmcSY/Ng==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-crypto/sha1-browser': 5.2.0 + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.709.0(@aws-sdk/client-sts@3.709.0) + '@aws-sdk/client-sts': 3.709.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/credential-provider-node': 3.709.0(@aws-sdk/client-sso-oidc@3.709.0)(@aws-sdk/client-sts@3.709.0) + '@aws-sdk/middleware-bucket-endpoint': 3.709.0 + '@aws-sdk/middleware-expect-continue': 3.709.0 + '@aws-sdk/middleware-flexible-checksums': 3.709.0 + '@aws-sdk/middleware-host-header': 3.709.0 + '@aws-sdk/middleware-location-constraint': 3.709.0 + '@aws-sdk/middleware-logger': 3.709.0 + '@aws-sdk/middleware-recursion-detection': 3.709.0 + '@aws-sdk/middleware-sdk-s3': 3.709.0 + '@aws-sdk/middleware-ssec': 3.709.0 + '@aws-sdk/middleware-user-agent': 3.709.0 + '@aws-sdk/region-config-resolver': 3.709.0 + '@aws-sdk/signature-v4-multi-region': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@aws-sdk/util-endpoints': 3.709.0 + '@aws-sdk/util-user-agent-browser': 3.709.0 + '@aws-sdk/util-user-agent-node': 3.709.0 + '@aws-sdk/xml-builder': 3.709.0 + '@smithy/config-resolver': 3.0.13 + '@smithy/core': 2.5.5 + '@smithy/eventstream-serde-browser': 3.0.14 + '@smithy/eventstream-serde-config-resolver': 3.0.11 + '@smithy/eventstream-serde-node': 3.0.13 + '@smithy/fetch-http-handler': 4.1.2 + '@smithy/hash-blob-browser': 3.1.10 + '@smithy/hash-node': 3.0.11 + '@smithy/hash-stream-node': 3.1.10 + '@smithy/invalid-dependency': 3.0.11 + '@smithy/md5-js': 3.0.11 + '@smithy/middleware-content-length': 3.0.13 + '@smithy/middleware-endpoint': 3.2.5 + '@smithy/middleware-retry': 3.0.30 + '@smithy/middleware-serde': 3.0.11 + '@smithy/middleware-stack': 3.0.11 + '@smithy/node-config-provider': 3.1.12 + '@smithy/node-http-handler': 3.3.2 + '@smithy/protocol-http': 4.1.8 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 + '@smithy/url-parser': 3.0.11 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 
3.0.30 + '@smithy/util-defaults-mode-node': 3.0.30 + '@smithy/util-endpoints': 2.1.7 + '@smithy/util-middleware': 3.0.11 + '@smithy/util-retry': 3.0.11 + '@smithy/util-stream': 3.3.2 + '@smithy/util-utf8': 3.0.0 + '@smithy/util-waiter': 3.2.0 + tslib: 2.6.2 transitivePeerDependencies: - - '@aws-sdk/signature-v4-crt' - aws-crt dev: false - /@aws-sdk/client-sso-oidc@3.319.0: - resolution: {integrity: sha512-GJBgT/tephRZY3oTbDBMv+G9taoqKUIvGPn+7shmzz2P1SerutsRSfKfDXV+VptPNRoGmjjCLPmWjMFYbFKILQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-crypto/sha256-browser': 3.0.0 - '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/config-resolver': 3.310.0 - '@aws-sdk/fetch-http-handler': 3.310.0 - '@aws-sdk/hash-node': 3.310.0 - '@aws-sdk/invalid-dependency': 3.310.0 - '@aws-sdk/middleware-content-length': 3.310.0 - '@aws-sdk/middleware-endpoint': 3.310.0 - '@aws-sdk/middleware-host-header': 3.310.0 - '@aws-sdk/middleware-logger': 3.310.0 - '@aws-sdk/middleware-recursion-detection': 3.310.0 - '@aws-sdk/middleware-retry': 3.310.0 - '@aws-sdk/middleware-serde': 3.310.0 - '@aws-sdk/middleware-stack': 3.310.0 - '@aws-sdk/middleware-user-agent': 3.319.0 - '@aws-sdk/node-config-provider': 3.310.0 - '@aws-sdk/node-http-handler': 3.310.0 - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/smithy-client': 3.316.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/url-parser': 3.310.0 - '@aws-sdk/util-base64': 3.310.0 - '@aws-sdk/util-body-length-browser': 3.310.0 - '@aws-sdk/util-body-length-node': 3.310.0 - '@aws-sdk/util-defaults-mode-browser': 3.316.0 - '@aws-sdk/util-defaults-mode-node': 3.316.0 - '@aws-sdk/util-endpoints': 3.319.0 - '@aws-sdk/util-retry': 3.310.0 - '@aws-sdk/util-user-agent-browser': 3.310.0 - '@aws-sdk/util-user-agent-node': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - tslib: 2.5.0 + /@aws-sdk/client-sso-oidc@3.709.0(@aws-sdk/client-sts@3.709.0): + resolution: {integrity: sha512-1w6egz17QQy661lNCRmZZlqIANEbD6g2VFAQIJbVwSiu7brg+GUns+mT1eLLLHAMQc1sL0Ds8/ybSK2SrgGgIA==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.709.0 + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sts': 3.709.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/credential-provider-node': 3.709.0(@aws-sdk/client-sso-oidc@3.709.0)(@aws-sdk/client-sts@3.709.0) + '@aws-sdk/middleware-host-header': 3.709.0 + '@aws-sdk/middleware-logger': 3.709.0 + '@aws-sdk/middleware-recursion-detection': 3.709.0 + '@aws-sdk/middleware-user-agent': 3.709.0 + '@aws-sdk/region-config-resolver': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@aws-sdk/util-endpoints': 3.709.0 + '@aws-sdk/util-user-agent-browser': 3.709.0 + '@aws-sdk/util-user-agent-node': 3.709.0 + '@smithy/config-resolver': 3.0.13 + '@smithy/core': 2.5.5 + '@smithy/fetch-http-handler': 4.1.2 + '@smithy/hash-node': 3.0.11 + '@smithy/invalid-dependency': 3.0.11 + '@smithy/middleware-content-length': 3.0.13 + '@smithy/middleware-endpoint': 3.2.5 + '@smithy/middleware-retry': 3.0.30 + '@smithy/middleware-serde': 3.0.11 + '@smithy/middleware-stack': 3.0.11 + '@smithy/node-config-provider': 3.1.12 + '@smithy/node-http-handler': 3.3.2 + '@smithy/protocol-http': 4.1.8 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 + '@smithy/url-parser': 3.0.11 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.30 + '@smithy/util-defaults-mode-node': 3.0.30 + '@smithy/util-endpoints': 2.1.7 + '@smithy/util-middleware': 3.0.11 + 
'@smithy/util-retry': 3.0.11 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/client-sso@3.319.0: - resolution: {integrity: sha512-g46KgAjRiYBS8Oi85DPwSAQpt+Hgmw/YFgGVwZqMfTL70KNJwLFKRa5D9UocQd7t7OjPRdKF7g0Gp5peyAK9dw==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-crypto/sha256-browser': 3.0.0 - '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/config-resolver': 3.310.0 - '@aws-sdk/fetch-http-handler': 3.310.0 - '@aws-sdk/hash-node': 3.310.0 - '@aws-sdk/invalid-dependency': 3.310.0 - '@aws-sdk/middleware-content-length': 3.310.0 - '@aws-sdk/middleware-endpoint': 3.310.0 - '@aws-sdk/middleware-host-header': 3.310.0 - '@aws-sdk/middleware-logger': 3.310.0 - '@aws-sdk/middleware-recursion-detection': 3.310.0 - '@aws-sdk/middleware-retry': 3.310.0 - '@aws-sdk/middleware-serde': 3.310.0 - '@aws-sdk/middleware-stack': 3.310.0 - '@aws-sdk/middleware-user-agent': 3.319.0 - '@aws-sdk/node-config-provider': 3.310.0 - '@aws-sdk/node-http-handler': 3.310.0 - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/smithy-client': 3.316.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/url-parser': 3.310.0 - '@aws-sdk/util-base64': 3.310.0 - '@aws-sdk/util-body-length-browser': 3.310.0 - '@aws-sdk/util-body-length-node': 3.310.0 - '@aws-sdk/util-defaults-mode-browser': 3.316.0 - '@aws-sdk/util-defaults-mode-node': 3.316.0 - '@aws-sdk/util-endpoints': 3.319.0 - '@aws-sdk/util-retry': 3.310.0 - '@aws-sdk/util-user-agent-browser': 3.310.0 - '@aws-sdk/util-user-agent-node': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - tslib: 2.5.0 + /@aws-sdk/client-sso@3.709.0: + resolution: {integrity: sha512-Qxeo8cN0jNy6Wnbqq4wucffAGJM6sJjofoTgNtPA6cC7sPYx7aYC6OAAAo6NaMRY+WywOKdS9Wgjx2QYRxKx7w==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/middleware-host-header': 3.709.0 + '@aws-sdk/middleware-logger': 3.709.0 + '@aws-sdk/middleware-recursion-detection': 3.709.0 + '@aws-sdk/middleware-user-agent': 3.709.0 + '@aws-sdk/region-config-resolver': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@aws-sdk/util-endpoints': 3.709.0 + '@aws-sdk/util-user-agent-browser': 3.709.0 + '@aws-sdk/util-user-agent-node': 3.709.0 + '@smithy/config-resolver': 3.0.13 + '@smithy/core': 2.5.5 + '@smithy/fetch-http-handler': 4.1.2 + '@smithy/hash-node': 3.0.11 + '@smithy/invalid-dependency': 3.0.11 + '@smithy/middleware-content-length': 3.0.13 + '@smithy/middleware-endpoint': 3.2.5 + '@smithy/middleware-retry': 3.0.30 + '@smithy/middleware-serde': 3.0.11 + '@smithy/middleware-stack': 3.0.11 + '@smithy/node-config-provider': 3.1.12 + '@smithy/node-http-handler': 3.3.2 + '@smithy/protocol-http': 4.1.8 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 + '@smithy/url-parser': 3.0.11 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.30 + '@smithy/util-defaults-mode-node': 3.0.30 + '@smithy/util-endpoints': 2.1.7 + '@smithy/util-middleware': 3.0.11 + '@smithy/util-retry': 3.0.11 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/client-sts@3.319.0: - resolution: {integrity: sha512-PRGGKCSKtyM3x629J9j4DMsH1cQT8UGW+R67u9Q5HrMK05gfjpmg+X1DQ3pgve4D8MI4R/Cm3NkYl2eUTbQHQg==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-crypto/sha256-browser': 3.0.0 - '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/config-resolver': 3.310.0 - 
'@aws-sdk/credential-provider-node': 3.319.0 - '@aws-sdk/fetch-http-handler': 3.310.0 - '@aws-sdk/hash-node': 3.310.0 - '@aws-sdk/invalid-dependency': 3.310.0 - '@aws-sdk/middleware-content-length': 3.310.0 - '@aws-sdk/middleware-endpoint': 3.310.0 - '@aws-sdk/middleware-host-header': 3.310.0 - '@aws-sdk/middleware-logger': 3.310.0 - '@aws-sdk/middleware-recursion-detection': 3.310.0 - '@aws-sdk/middleware-retry': 3.310.0 - '@aws-sdk/middleware-sdk-sts': 3.310.0 - '@aws-sdk/middleware-serde': 3.310.0 - '@aws-sdk/middleware-signing': 3.310.0 - '@aws-sdk/middleware-stack': 3.310.0 - '@aws-sdk/middleware-user-agent': 3.319.0 - '@aws-sdk/node-config-provider': 3.310.0 - '@aws-sdk/node-http-handler': 3.310.0 - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/smithy-client': 3.316.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/url-parser': 3.310.0 - '@aws-sdk/util-base64': 3.310.0 - '@aws-sdk/util-body-length-browser': 3.310.0 - '@aws-sdk/util-body-length-node': 3.310.0 - '@aws-sdk/util-defaults-mode-browser': 3.316.0 - '@aws-sdk/util-defaults-mode-node': 3.316.0 - '@aws-sdk/util-endpoints': 3.319.0 - '@aws-sdk/util-retry': 3.310.0 - '@aws-sdk/util-user-agent-browser': 3.310.0 - '@aws-sdk/util-user-agent-node': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - fast-xml-parser: 4.1.2 - tslib: 2.5.0 + /@aws-sdk/client-sts@3.709.0: + resolution: {integrity: sha512-cBAvlPg6yslXNL385UUGFPw+XY+lA9BzioNdIFkMo3fEUlTShogTtiWz4LsyLHoN6LhKojssP9DSmmWKWjCZIw==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/client-sso-oidc': 3.709.0(@aws-sdk/client-sts@3.709.0) + '@aws-sdk/core': 3.709.0 + '@aws-sdk/credential-provider-node': 3.709.0(@aws-sdk/client-sso-oidc@3.709.0)(@aws-sdk/client-sts@3.709.0) + '@aws-sdk/middleware-host-header': 3.709.0 + '@aws-sdk/middleware-logger': 3.709.0 + '@aws-sdk/middleware-recursion-detection': 3.709.0 + '@aws-sdk/middleware-user-agent': 3.709.0 + '@aws-sdk/region-config-resolver': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@aws-sdk/util-endpoints': 3.709.0 + '@aws-sdk/util-user-agent-browser': 3.709.0 + '@aws-sdk/util-user-agent-node': 3.709.0 + '@smithy/config-resolver': 3.0.13 + '@smithy/core': 2.5.5 + '@smithy/fetch-http-handler': 4.1.2 + '@smithy/hash-node': 3.0.11 + '@smithy/invalid-dependency': 3.0.11 + '@smithy/middleware-content-length': 3.0.13 + '@smithy/middleware-endpoint': 3.2.5 + '@smithy/middleware-retry': 3.0.30 + '@smithy/middleware-serde': 3.0.11 + '@smithy/middleware-stack': 3.0.11 + '@smithy/node-config-provider': 3.1.12 + '@smithy/node-http-handler': 3.3.2 + '@smithy/protocol-http': 4.1.8 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 + '@smithy/url-parser': 3.0.11 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.30 + '@smithy/util-defaults-mode-node': 3.0.30 + '@smithy/util-endpoints': 2.1.7 + '@smithy/util-middleware': 3.0.11 + '@smithy/util-retry': 3.0.11 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/config-resolver@3.310.0: - resolution: {integrity: sha512-8vsT+/50lOqfDxka9m/rRt6oxv1WuGZoP8oPMk0Dt+TxXMbAzf4+rejBgiB96wshI1k3gLokYRjSQZn+dDtT8g==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-config-provider': 3.310.0 - '@aws-sdk/util-middleware': 3.310.0 - tslib: 2.5.0 + /@aws-sdk/core@3.709.0: + resolution: {integrity: 
sha512-7kuSpzdOTAE026j85wq/fN9UDZ70n0OHw81vFqMWwlEFtm5IQ/MRCLKcC4HkXxTdfy1PqFlmoXxWqeBa15tujw==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/types': 3.709.0 + '@smithy/core': 2.5.5 + '@smithy/node-config-provider': 3.1.12 + '@smithy/property-provider': 3.1.11 + '@smithy/protocol-http': 4.1.8 + '@smithy/signature-v4': 4.2.4 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 + '@smithy/util-middleware': 3.0.11 + fast-xml-parser: 4.4.1 + tslib: 2.6.2 dev: false - /@aws-sdk/credential-provider-env@3.310.0: - resolution: {integrity: sha512-vvIPQpI16fj95xwS7M3D48F7QhZJBnnCgB5lR+b7So+vsG9ibm1mZRVGzVpdxCvgyOhHFbvrby9aalNJmmIP1A==} - engines: {node: '>=14.0.0'} + /@aws-sdk/credential-provider-env@3.709.0: + resolution: {integrity: sha512-ZMAp9LSikvHDFVa84dKpQmow6wsg956Um20cKuioPpX2GGreJFur7oduD+tRJT6FtIOHn+64YH+0MwiXLhsaIQ==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@smithy/property-provider': 3.1.11 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/credential-provider-imds@3.310.0: - resolution: {integrity: sha512-baxK7Zp6dai5AGW01FIW27xS2KAaPUmKLIXv5SvFYsUgXXvNW55im4uG3b+2gA0F7V+hXvVBH08OEqmwW6we5w==} - engines: {node: '>=14.0.0'} + /@aws-sdk/credential-provider-http@3.709.0: + resolution: {integrity: sha512-lIS7XLwCOyJnLD70f+VIRr8DNV1HPQe9oN6aguYrhoczqz7vDiVZLe3lh714cJqq9rdxzFypK5DqKHmcscMEPQ==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/node-config-provider': 3.310.0 - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/url-parser': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@smithy/fetch-http-handler': 4.1.2 + '@smithy/node-http-handler': 3.3.2 + '@smithy/property-provider': 3.1.11 + '@smithy/protocol-http': 4.1.8 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 + '@smithy/util-stream': 3.3.2 + tslib: 2.6.2 dev: false - /@aws-sdk/credential-provider-ini@3.319.0: - resolution: {integrity: sha512-pzx388Fw1KlSgmIMUyRY8DJVYM3aXpwzjprD4RiQVPJeAI+t7oQmEvd2FiUZEuHDjWXcuonxgU+dk7i7HUk/HQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/credential-provider-env': 3.310.0 - '@aws-sdk/credential-provider-imds': 3.310.0 - '@aws-sdk/credential-provider-process': 3.310.0 - '@aws-sdk/credential-provider-sso': 3.319.0 - '@aws-sdk/credential-provider-web-identity': 3.310.0 - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/shared-ini-file-loader': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + /@aws-sdk/credential-provider-ini@3.709.0(@aws-sdk/client-sso-oidc@3.709.0)(@aws-sdk/client-sts@3.709.0): + resolution: {integrity: sha512-qCF8IIGcPoUp+Ib3ANhbF5gElxFd+kIrtv2/1tKdvhudMANstQbMiWV0LTH47ZZR6c3as4iSrm09NZnpEoD/pA==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.709.0 + dependencies: + '@aws-sdk/client-sts': 3.709.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/credential-provider-env': 3.709.0 + '@aws-sdk/credential-provider-http': 3.709.0 + '@aws-sdk/credential-provider-process': 3.709.0 + '@aws-sdk/credential-provider-sso': 3.709.0(@aws-sdk/client-sso-oidc@3.709.0) + '@aws-sdk/credential-provider-web-identity': 3.709.0(@aws-sdk/client-sts@3.709.0) + '@aws-sdk/types': 3.709.0 + '@smithy/credential-provider-imds': 3.2.8 + '@smithy/property-provider': 3.1.11 + '@smithy/shared-ini-file-loader': 3.1.12 + '@smithy/types': 3.7.2 + tslib: 2.6.2 transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' - aws-crt 
dev: false - /@aws-sdk/credential-provider-node@3.319.0: - resolution: {integrity: sha512-DS4a0Rdd7ZtMshoeE+zuSgbC05YBcdzd0h89u/eX+1Yqx+HCjeb8WXkbXsz0Mwx8q9TE04aS8f6Bw9J4x4mO5g==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/credential-provider-env': 3.310.0 - '@aws-sdk/credential-provider-imds': 3.310.0 - '@aws-sdk/credential-provider-ini': 3.319.0 - '@aws-sdk/credential-provider-process': 3.310.0 - '@aws-sdk/credential-provider-sso': 3.319.0 - '@aws-sdk/credential-provider-web-identity': 3.310.0 - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/shared-ini-file-loader': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + /@aws-sdk/credential-provider-node@3.709.0(@aws-sdk/client-sso-oidc@3.709.0)(@aws-sdk/client-sts@3.709.0): + resolution: {integrity: sha512-4HRX9KYWPSjO5O/Vg03YAsebKpvTjTvpK1n7zHYBmlLMBLxUrVsL1nNKKC5p2/7OW3RL8XR1ki3QkoV7kGRxUQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/credential-provider-env': 3.709.0 + '@aws-sdk/credential-provider-http': 3.709.0 + '@aws-sdk/credential-provider-ini': 3.709.0(@aws-sdk/client-sso-oidc@3.709.0)(@aws-sdk/client-sts@3.709.0) + '@aws-sdk/credential-provider-process': 3.709.0 + '@aws-sdk/credential-provider-sso': 3.709.0(@aws-sdk/client-sso-oidc@3.709.0) + '@aws-sdk/credential-provider-web-identity': 3.709.0(@aws-sdk/client-sts@3.709.0) + '@aws-sdk/types': 3.709.0 + '@smithy/credential-provider-imds': 3.2.8 + '@smithy/property-provider': 3.1.11 + '@smithy/shared-ini-file-loader': 3.1.12 + '@smithy/types': 3.7.2 + tslib: 2.6.2 transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' - aws-crt dev: false - /@aws-sdk/credential-provider-process@3.310.0: - resolution: {integrity: sha512-h73sg6GPMUWC+3zMCbA1nZ2O03nNJt7G96JdmnantiXBwHpRKWW8nBTLzx5uhXn6hTuTaoQRP/P+oxQJKYdMmA==} - engines: {node: '>=14.0.0'} + /@aws-sdk/credential-provider-process@3.709.0: + resolution: {integrity: sha512-IAC+jPlGQII6jhIylHOwh3RgSobqlgL59nw2qYTURr8hMCI0Z1p5y2ee646HTVt4WeCYyzUAXfxr6YI/Vitv+Q==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/shared-ini-file-loader': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@smithy/property-provider': 3.1.11 + '@smithy/shared-ini-file-loader': 3.1.12 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/credential-provider-sso@3.319.0: - resolution: {integrity: sha512-gAUnWH41lxkIbANXu+Rz5zS0Iavjjmpf3C56vAMT7oaYZ3Cg/Ys5l2SwAucQGOCA2DdS2hDiSI8E+Yhr4F5toA==} - engines: {node: '>=14.0.0'} + /@aws-sdk/credential-provider-sso@3.709.0(@aws-sdk/client-sso-oidc@3.709.0): + resolution: {integrity: sha512-rYdTDOxazS2GdGScelsRK5CAkktRLCCdRjlwXaxrcW57j749hEqxcF5uTv9RD6WBwInfedcSywErNZB+hylQlg==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/client-sso': 3.319.0 - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/shared-ini-file-loader': 3.310.0 - '@aws-sdk/token-providers': 3.319.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/client-sso': 3.709.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/token-providers': 3.709.0(@aws-sdk/client-sso-oidc@3.709.0) + '@aws-sdk/types': 3.709.0 + '@smithy/property-provider': 3.1.11 + '@smithy/shared-ini-file-loader': 3.1.12 + '@smithy/types': 3.7.2 + tslib: 2.6.2 transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' - aws-crt dev: false - /@aws-sdk/credential-provider-web-identity@3.310.0: - resolution: {integrity: sha512-H4SzuZXILNhK6/IR1uVvsUDZvzc051hem7GLyYghBCu8mU+tq28YhKE8MfSroi6eL2e5Vujloij1OM2EQQkPkw==} - 
engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/eventstream-codec@3.310.0: - resolution: {integrity: sha512-clIeSgWbZbxwtsxZ/yoedNM0/kJFSIjjHPikuDGhxhqc+vP6TN3oYyVMFrYwFaTFhk2+S5wZcWYMw8Op1pWo+A==} - dependencies: - '@aws-crypto/crc32': 3.0.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-hex-encoding': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/eventstream-serde-browser@3.310.0: - resolution: {integrity: sha512-3S6ziuQVALgEyz0TANGtYDVeG8ArK4Y05mcgrs8qUTmsvlDIXX37cR/DvmVbNB76M4IrsZeSAIajL9644CywkA==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/eventstream-serde-universal': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/eventstream-serde-config-resolver@3.310.0: - resolution: {integrity: sha512-8s1Qdn9STj+sV75nUp9yt0W6fHS4BZ2jTm4Z/1Pcbvh2Gqs0WjH5n2StS+pDW5Y9J/HSGBl0ogmUr5lC5bXFHg==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/eventstream-serde-node@3.310.0: - resolution: {integrity: sha512-kSnRomCgW43K9TmQYuwN9+AoYPnhyOKroanUMyZEzJk7rpCPMj4OzaUpXfDYOvznFNYn7NLaH6nHLJAr0VPlJA==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/eventstream-serde-universal': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/eventstream-serde-universal@3.310.0: - resolution: {integrity: sha512-Qyjt5k/waV5cDukpgT824ISZAz5U0pwzLz5ztR409u85AGNkF/9n7MS+LSyBUBSb0WJ5pUeSD47WBk+nLq9Nhw==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/eventstream-codec': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/fetch-http-handler@3.310.0: - resolution: {integrity: sha512-Bi9vIwzdkw1zMcvi/zGzlWS9KfIEnAq4NNhsnCxbQ4OoIRU9wvU+WGZdBBhxg0ZxZmpp1j1aZhU53lLjA07MHw==} - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/querystring-builder': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-base64': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/hash-blob-browser@3.310.0: - resolution: {integrity: sha512-OoR8p0cbypToysLT0v3o2oyjy6+DKrY7GNCAzHOHJK9xmqXCt+DsjKoPeiY7o1sWX2aN6Plmvubj/zWxMKEn/A==} - dependencies: - '@aws-sdk/chunked-blob-reader': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/hash-node@3.310.0: - resolution: {integrity: sha512-NvE2fhRc8GRwCXBfDehxVAWCmVwVMILliAKVPAEr4yz2CkYs0tqU51S48x23dtna07H4qHtgpeNqVTthcIQOEQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-buffer-from': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/hash-stream-node@3.310.0: - resolution: {integrity: sha512-ZoXdybNgvMz1Hl6k/e32xVL3jmG5p2IEk5mTtLfFEuskTJ74Z+VMYKkkF1whyy7KQfH83H+TQGnsGtlRCchQKw==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/invalid-dependency@3.310.0: - resolution: {integrity: sha512-1s5RG5rSPXoa/aZ/Kqr5U/7lqpx+Ry81GprQ2bxWqJvWQIJ0IRUwo5pk8XFxbKVr/2a+4lZT/c3OGoBOM1yRRA==} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/is-array-buffer@3.310.0: - resolution: {integrity: sha512-urnbcCR+h9NWUnmOtet/s4ghvzsidFmspfhYaHAmSRdy9yDjdjBJMFjjsn85A1ODUktztm+cVncXjQ38WCMjMQ==} - engines: {node: '>=14.0.0'} + /@aws-sdk/credential-provider-web-identity@3.709.0(@aws-sdk/client-sts@3.709.0): + resolution: {integrity: 
sha512-2lbDfE0IQ6gma/7BB2JpkjW5G0wGe4AS0x80oybYAYYviJmUtIR3Cn2pXun6bnAWElt4wYKl4su7oC36rs5rNA==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sts': ^3.709.0 dependencies: - tslib: 2.5.0 + '@aws-sdk/client-sts': 3.709.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@smithy/property-provider': 3.1.11 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/lib-storage@3.319.0(@aws-sdk/abort-controller@3.374.0)(@aws-sdk/client-s3@3.319.0): - resolution: {integrity: sha512-/iWG61UTaUJO3Lfb/jhVk8BMVaFjiUplIqrDQTid2rcBEGJsepbuIL/+mas7redbN+4aOMQTioBZcTCapdef6Q==} - engines: {node: '>=14.0.0'} + /@aws-sdk/lib-storage@3.709.0(@aws-sdk/client-s3@3.709.0): + resolution: {integrity: sha512-TnP+QSsWdiaQYS5HuB3n9H947z49m6qSEv5fth4L9xinBldLepLyyF+cua3/GlagkWqpxcATISgR9pE1PB0mhQ==} + engines: {node: '>=16.0.0'} peerDependencies: - '@aws-sdk/abort-controller': ^3.0.0 - '@aws-sdk/client-s3': ^3.0.0 + '@aws-sdk/client-s3': ^3.709.0 dependencies: - '@aws-sdk/abort-controller': 3.374.0 - '@aws-sdk/client-s3': 3.319.0 - '@aws-sdk/middleware-endpoint': 3.310.0 - '@aws-sdk/smithy-client': 3.316.0 + '@aws-sdk/client-s3': 3.709.0 + '@smithy/abort-controller': 3.1.9 + '@smithy/middleware-endpoint': 3.2.5 + '@smithy/smithy-client': 3.5.0 buffer: 5.6.0 events: 3.3.0 stream-browserify: 3.0.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/md5-js@3.310.0: - resolution: {integrity: sha512-x5sRBUrEfLWAS1EhwbbDQ7cXq6uvBxh3qR2XAsnGvFFceTeAadk7cVogWxlk3PC+OCeeym7c3/6Bv2HQ2f1YyQ==} - dependencies: - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-bucket-endpoint@3.310.0: - resolution: {integrity: sha512-uJJfHI7v4AgbJZRLtyI8ap2QRWkBokGc3iyUoQ+dVNT3/CE2ZCu694A6W+H0dRqg79dIE+f9CRNdtLGa/Ehhvg==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-arn-parser': 3.310.0 - '@aws-sdk/util-config-provider': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-content-length@3.310.0: - resolution: {integrity: sha512-P8tQZxgDt6CAh1wd/W6WPzjc+uWPJwQkm+F7rAwRlM+k9q17HrhnksGDKcpuuLyIhPQYdmOMIkpKVgXGa4avhQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-endpoint@3.310.0: - resolution: {integrity: sha512-Z+N2vOL8K354/lstkClxLLsr6hCpVRh+0tCMXrVj66/NtKysCEZ/0b9LmqOwD9pWHNiI2mJqXwY0gxNlKAroUg==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/middleware-serde': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/url-parser': 3.310.0 - '@aws-sdk/util-middleware': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-expect-continue@3.310.0: - resolution: {integrity: sha512-l3d1z2gt+gINJDnPSyu84IxfzjzPfCQrqC1sunw2cZGo/sXtEiq698Q3SiTcO2PGP4LBQAy2RHb5wVBJP708CQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-flexible-checksums@3.310.0: - resolution: {integrity: sha512-5ndnLgzgGVpWkmHBAiYkagHqiSuow8q62J4J6E2PzaQ77+fm8W3nfdy7hK5trHokEyouCZdxT/XK/IRhgj/4PA==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-crypto/crc32': 3.0.0 - '@aws-crypto/crc32c': 3.0.0 - '@aws-sdk/is-array-buffer': 3.310.0 - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-host-header@3.310.0: - resolution: {integrity: 
sha512-QWSA+46/hXorXyWa61ic2K7qZzwHTiwfk2e9mRRjeIRepUgI3qxFjsYqrWtrOGBjmFmq0pYIY8Bb/DCJuQqcoA==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-location-constraint@3.310.0: - resolution: {integrity: sha512-LFm0JTQWwTPWL/tZU2wsQTl8J5PpDEkXjEhaXVKamtyH0xhysRqd+0n92n65dc8oztAuQkb9xUbErGn5b6gsew==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-logger@3.310.0: - resolution: {integrity: sha512-Lurm8XofrASBRnAVtiSNuDSRsRqPNg27RIFLLsLp/pqog9nFJ0vz0kgdb9S5Z+zw83Mm+UlqOe6D8NTUNp4fVg==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-recursion-detection@3.310.0: - resolution: {integrity: sha512-SuB75/xk/gyue24gkriTwO2jFd7YcUGZDClQYuRejgbXSa3CO0lWyawQtfLcSSEBp9izrEVXuFH24K1eAft5nQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-retry@3.310.0: - resolution: {integrity: sha512-oTPsRy2W4s+dfxbJPW7Km+hHtv/OMsNsVfThAq8DDYKC13qlr1aAyOqGLD+dpBy2aKe7ss517Sy2HcHtHqm7/g==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/service-error-classification': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-middleware': 3.310.0 - '@aws-sdk/util-retry': 3.310.0 - tslib: 2.5.0 - uuid: 8.3.2 - dev: false - - /@aws-sdk/middleware-sdk-s3@3.310.0: - resolution: {integrity: sha512-QK9x9g2ksg0hOjjYgqddeFcn5ctUEGdxJVu4OumPXceulefMcSO2jyH2qTybYSA93nqNQFdFmg5wQfvIRUWFCQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-arn-parser': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-sdk-sts@3.310.0: - resolution: {integrity: sha512-+5PFwlYNLvLLIfw0ASAoWV/iIF8Zv6R6QGtyP0CclhRSvNjgbQDVnV0g95MC5qvh+GB/Yjlkt8qAjLSPjHfsrQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/middleware-signing': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-serde@3.310.0: - resolution: {integrity: sha512-RNeeTVWSLTaentUeCgQKZhAl+C6hxtwD78cQWS10UymWpQFwbaxztzKUu4UQS5xA2j6PxwPRRUjqa4jcFjfLsg==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-signing@3.310.0: - resolution: {integrity: sha512-f9mKq+XMdW207Af3hKjdTnpNhdtwqWuvFs/ZyXoOkp/g1MY1O6L23Jy6i52m29LxbT4AuNRG1oKODfXM0vYVjQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/signature-v4': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-middleware': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-ssec@3.310.0: - resolution: {integrity: sha512-CnEwNKVpd5bXnrCKPaePF8mWTA9ET21OMBb54y9b0fd8K02zoOcdBz4DWfh1SjFD4HkgCdja4egd8l2ivyvqmw==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-stack@3.310.0: - resolution: {integrity: sha512-010O1PD+UAcZVKRvqEusE1KJqN96wwrf6QsqbRM0ywsKQ21NDweaHvEDlds2VHpgmofxkRLRu/IDrlPkKRQrRg==} - engines: {node: '>=14.0.0'} - dependencies: - tslib: 2.5.0 - dev: false - - /@aws-sdk/middleware-user-agent@3.319.0: - resolution: {integrity: sha512-ytaLx2dlR5AdMSne6FuDCISVg8hjyKj+cHU20b2CRA/E/z+XXrLrssp4JrCgizRKPPUep0psMIa22Zd6osTT5Q==} - engines: {node: 
'>=14.0.0'} - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-endpoints': 3.319.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/node-config-provider@3.310.0: - resolution: {integrity: sha512-T/Pp6htc6hq/Cq+MLNDSyiwWCMVF6GqbBbXKVlO5L8rdHx4sq9xPdoPveZhGWrxvkanjA6eCwUp6E0riBOSVng==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/shared-ini-file-loader': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/node-http-handler@3.310.0: - resolution: {integrity: sha512-irv9mbcM9xC2xYjArQF5SYmHBMu4ciMWtGsoHII1nRuFOl9FoT4ffTvEPuLlfC6pznzvKt9zvnm6xXj7gDChKg==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/abort-controller': 3.310.0 - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/querystring-builder': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/property-provider@3.310.0: - resolution: {integrity: sha512-3lxDb0akV6BBzmFe4nLPaoliQbAifyWJhuvuDOu7e8NzouvpQXs0275w9LePhhcgjKAEVXUIse05ZW2DLbxo/g==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/protocol-http@3.310.0: - resolution: {integrity: sha512-fgZ1aw/irQtnrsR58pS8ThKOWo57Py3xX6giRvwSgZDEcxHfVzuQjy9yPuV++v04fdmdtgpbGf8WfvAAJ11yXQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/querystring-builder@3.310.0: - resolution: {integrity: sha512-ZHH8GV/80+pWGo7DzsvwvXR5xVxUHXUvPJPFAkhr6nCf78igdoF8gR10ScFoEKbtEapoNTaZlKHPXxpD8aPG7A==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-uri-escape': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/querystring-parser@3.310.0: - resolution: {integrity: sha512-YkIznoP6lsiIUHinx++/lbb3tlMURGGqMpo0Pnn32zYzGrJXA6eC3D0as2EcMjo55onTfuLcIiX4qzXes2MYOA==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/service-error-classification@3.310.0: - resolution: {integrity: sha512-PuyC7k3qfIKeH2LCnDwbttMOKq3qAx4buvg0yfnJtQOz6t1AR8gsnAq0CjKXXyfkXwNKWTqCpE6lVNUIkXgsMw==} - engines: {node: '>=14.0.0'} - dev: false - - /@aws-sdk/shared-ini-file-loader@3.310.0: - resolution: {integrity: sha512-N0q9pG0xSjQwc690YQND5bofm+4nfUviQ/Ppgan2kU6aU0WUq8KwgHJBto/YEEI+VlrME30jZJnxtOvcZJc2XA==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/signature-v4-multi-region@3.310.0: - resolution: {integrity: sha512-q8W+RIomTS/q85Ntgks/CoDElwqkC9+4OCicee5YznNHjQ4gtNWhUkYIyIRWRmXa/qx/AUreW9DM8FAecCOdng==} - engines: {node: '>=14.0.0'} - peerDependencies: - '@aws-sdk/signature-v4-crt': ^3.118.0 - peerDependenciesMeta: - '@aws-sdk/signature-v4-crt': - optional: true - dependencies: - '@aws-sdk/protocol-http': 3.310.0 - '@aws-sdk/signature-v4': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/signature-v4@3.310.0: - resolution: {integrity: sha512-1M60P1ZBNAjCFv9sYW29OF6okktaeibWyW3lMXqzoHF70lHBZh+838iUchznXUA5FLabfn4jBFWMRxlAXJUY2Q==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/is-array-buffer': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-hex-encoding': 3.310.0 - '@aws-sdk/util-middleware': 3.310.0 - '@aws-sdk/util-uri-escape': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/smithy-client@3.316.0: - resolution: {integrity: 
sha512-6YXOKbRnXeS8r8RWzuL6JMBolDYM5Wa4fD/VY6x/wK78i2xErHOvqzHgyyeLI1MMw4uqyd4wRNJNWC9TMPduXw==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/middleware-stack': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/token-providers@3.319.0: - resolution: {integrity: sha512-5utg6VL6Pl0uiLUn8ZJPYYxzCb9VRPsgJmGXktRUwq0YlTJ6ABcaxTXwZcC++sjh/qyCQDK5PPLNU5kIBttHMQ==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/client-sso-oidc': 3.319.0 - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/shared-ini-file-loader': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - transitivePeerDependencies: - - aws-crt - dev: false - - /@aws-sdk/types@3.310.0: - resolution: {integrity: sha512-j8eamQJ7YcIhw7fneUfs8LYl3t01k4uHi4ZDmNRgtbmbmTTG3FZc2MotStZnp3nZB6vLiPF1o5aoJxWVvkzS6A==} - engines: {node: '>=14.0.0'} - dependencies: - tslib: 2.5.0 + tslib: 2.6.2 dev: false - /@aws-sdk/url-parser@3.310.0: - resolution: {integrity: sha512-mCLnCaSB9rQvAgx33u0DujLvr4d5yEm/W5r789GblwwQnlNXedVu50QRizMLTpltYWyAUoXjJgQnJHmJMaKXhw==} + /@aws-sdk/middleware-bucket-endpoint@3.709.0: + resolution: {integrity: sha512-03+tJOd7KIZOiqWH7Z8BOfQIWkKJgjcpKOJKZ6FR2KjWGUOE1G+bo11wF4UuHQ0RmpKnApt+pQghZmSnE7WEeg==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/querystring-parser': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@aws-sdk/util-arn-parser': 3.693.0 + '@smithy/node-config-provider': 3.1.12 + '@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + '@smithy/util-config-provider': 3.0.0 + tslib: 2.6.2 dev: false - /@aws-sdk/util-arn-parser@3.310.0: - resolution: {integrity: sha512-jL8509owp/xB9+Or0pvn3Fe+b94qfklc2yPowZZIFAkFcCSIdkIglz18cPDWnYAcy9JGewpMS1COXKIUhZkJsA==} - engines: {node: '>=14.0.0'} + /@aws-sdk/middleware-expect-continue@3.709.0: + resolution: {integrity: sha512-Tbl/DFvE4rHl8lMb9IzetwK4tf5R3VeHZkvEXQalsWoK0tbEQ8kXWi7wAYO4qbE7bFVvaxKX+irjJjTxf3BrCQ==} + engines: {node: '>=16.0.0'} dependencies: - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-base64@3.310.0: - resolution: {integrity: sha512-v3+HBKQvqgdzcbL+pFswlx5HQsd9L6ZTlyPVL2LS9nNXnCcR3XgGz9jRskikRUuUvUXtkSG1J88GAOnJ/apTPg==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/util-buffer-from': 3.310.0 - tslib: 2.5.0 + /@aws-sdk/middleware-flexible-checksums@3.709.0: + resolution: {integrity: sha512-wbYm9tkyCaqMeU82yjaXw7V5BxCSlSLNupENW63LC7Fvyo/aQzj6LjSMHcBpR2QwjBEhXCtF47L7aQ8SPTNhdw==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-crypto/crc32': 5.2.0 + '@aws-crypto/crc32c': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@smithy/is-array-buffer': 3.0.0 + '@smithy/node-config-provider': 3.1.12 + '@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + '@smithy/util-middleware': 3.0.11 + '@smithy/util-stream': 3.3.2 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 dev: false - /@aws-sdk/util-body-length-browser@3.310.0: - resolution: {integrity: sha512-sxsC3lPBGfpHtNTUoGXMQXLwjmR0zVpx0rSvzTPAuoVILVsp5AU/w5FphNPxD5OVIjNbZv9KsKTuvNTiZjDp9g==} + /@aws-sdk/middleware-host-header@3.709.0: + resolution: {integrity: sha512-8gQYCYAaIw4lOCd5WYdf15Y/61MgRsAnrb2eiTl+icMlUOOzl8aOl5iDwm/Idp0oHZTflwxM4XSvGXO83PRWcw==} + engines: {node: '>=16.0.0'} dependencies: - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-body-length-node@3.310.0: - 
resolution: {integrity: sha512-2tqGXdyKhyA6w4zz7UPoS8Ip+7sayOg9BwHNidiGm2ikbDxm1YrCfYXvCBdwaJxa4hJfRVz+aL9e+d3GqPI9pQ==} - engines: {node: '>=14.0.0'} + /@aws-sdk/middleware-location-constraint@3.709.0: + resolution: {integrity: sha512-5YQWPXfZq7OE0jB2G0PP8K10GBod/YPJXb+1CfJS6FbQaglRoIm8KZmVEvJNnptSKyGtE62veeCcCQcfAUfFig==} + engines: {node: '>=16.0.0'} dependencies: - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-buffer-from@3.310.0: - resolution: {integrity: sha512-i6LVeXFtGih5Zs8enLrt+ExXY92QV25jtEnTKHsmlFqFAuL3VBeod6boeMXkN2p9lbSVVQ1sAOOYZOHYbYkntw==} - engines: {node: '>=14.0.0'} + /@aws-sdk/middleware-logger@3.709.0: + resolution: {integrity: sha512-jDoGSccXv9zebnpUoisjWd5u5ZPIalrmm6TjvPzZ8UqzQt3Beiz0tnQwmxQD6KRc7ADweWP5Ntiqzbw9xpVajg==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/is-array-buffer': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-config-provider@3.310.0: - resolution: {integrity: sha512-xIBaYo8dwiojCw8vnUcIL4Z5tyfb1v3yjqyJKJWV/dqKUFOOS0U591plmXbM+M/QkXyML3ypon1f8+BoaDExrg==} - engines: {node: '>=14.0.0'} + /@aws-sdk/middleware-recursion-detection@3.709.0: + resolution: {integrity: sha512-PObL/wLr4lkfbQ0yXUWaoCWu/jcwfwZzCjsUiXW/H6hW9b/00enZxmx7OhtJYaR6xmh/Lcx5wbhIoDCbzdv0tw==} + engines: {node: '>=16.0.0'} dependencies: - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-defaults-mode-browser@3.316.0: - resolution: {integrity: sha512-6FSqLhYmaihtH2n1s4b2rlLW0ABU8N6VZIfzLfe2ING4PF0MzfaMMhnTFUHVXfKCVGoR8yP6iyFTRCyHGVEL1w==} - engines: {node: '>= 10.0.0'} - dependencies: - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/types': 3.310.0 - bowser: 2.11.0 - tslib: 2.5.0 + /@aws-sdk/middleware-sdk-s3@3.709.0: + resolution: {integrity: sha512-FwtOG9t9xsLoLOQZ6qAdsWOjx9dsO6t28IjIDV1l6Ixiu2oC0Yks7goONjJUH0IDE4pDDDGzmuq0sn1XtHhheA==} + engines: {node: '>=16.0.0'} + dependencies: + '@aws-sdk/core': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@aws-sdk/util-arn-parser': 3.693.0 + '@smithy/core': 2.5.5 + '@smithy/node-config-provider': 3.1.12 + '@smithy/protocol-http': 4.1.8 + '@smithy/signature-v4': 4.2.4 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.11 + '@smithy/util-stream': 3.3.2 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 dev: false - /@aws-sdk/util-defaults-mode-node@3.316.0: - resolution: {integrity: sha512-dkYy10hdjPSScXXvnjGpZpnJxllkb6ICHgLMwZ4JczLHhPM12T/4PQ758YN8HS+muiYDGX1Bl2z1jd/bMcewBQ==} - engines: {node: '>= 10.0.0'} + /@aws-sdk/middleware-ssec@3.709.0: + resolution: {integrity: sha512-2muiLe7YkmlwZp2SKz+goZrDThGfRq3o0FcJF3Puc0XGmcEPEDjih537mCoTrGgcXNFlBc7YChd84r3t72ySaQ==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/config-resolver': 3.310.0 - '@aws-sdk/credential-provider-imds': 3.310.0 - '@aws-sdk/node-config-provider': 3.310.0 - '@aws-sdk/property-provider': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-endpoints@3.319.0: - resolution: {integrity: sha512-3I64UMoYA2e2++oOUJXRcFtYLpLylnZFRltWfPo1B3dLlf+MIWat9djT+mMus+hW1ntLsvAIVu1hLVePJC0gvw==} - engines: {node: '>=14.0.0'} + /@aws-sdk/middleware-user-agent@3.709.0: + resolution: {integrity: sha512-ooc9ZJvgkjPhi9q05XwSfNTXkEBEIfL4hleo5rQBKwHG3aTHvwOM7LLzhdX56QZVa6sorPBp6fwULuRDSqiQHw==} + 
engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/core': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@aws-sdk/util-endpoints': 3.709.0 + '@smithy/core': 2.5.5 + '@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-hex-encoding@3.310.0: - resolution: {integrity: sha512-sVN7mcCCDSJ67pI1ZMtk84SKGqyix6/0A1Ab163YKn+lFBQRMKexleZzpYzNGxYzmQS6VanP/cfU7NiLQOaSfA==} - engines: {node: '>=14.0.0'} + /@aws-sdk/region-config-resolver@3.709.0: + resolution: {integrity: sha512-/NoCAMEVKAg3kBKOrNtgOfL+ECt6nrl+L7q2SyYmrcY4tVCmwuECVqewQaHc03fTnJijfKLccw0Fj+6wOCnB6w==} + engines: {node: '>=16.0.0'} dependencies: - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@smithy/node-config-provider': 3.1.12 + '@smithy/types': 3.7.2 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.11 + tslib: 2.6.2 dev: false - /@aws-sdk/util-locate-window@3.310.0: - resolution: {integrity: sha512-qo2t/vBTnoXpjKxlsC2e1gBrRm80M3bId27r0BRB2VniSSe7bL1mmzM+/HFtujm0iAxtPM+aLEflLJlJeDPg0w==} - engines: {node: '>=14.0.0'} + /@aws-sdk/signature-v4-multi-region@3.709.0: + resolution: {integrity: sha512-m0vhJEy6SLbjL11K9cHzX/ZhCIj//1GkTbYk2d4tTQFSuPyJEkjmoeHk9dYm2mJy0wH48j29OJadI1JUsR5bOw==} + engines: {node: '>=16.0.0'} dependencies: - tslib: 2.5.0 + '@aws-sdk/middleware-sdk-s3': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@smithy/protocol-http': 4.1.8 + '@smithy/signature-v4': 4.2.4 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-middleware@3.310.0: - resolution: {integrity: sha512-FTSUKL/eRb9X6uEZClrTe27QFXUNNp7fxYrPndZwk1hlaOP5ix+MIHBcI7pIiiY/JPfOUmPyZOu+HetlFXjWog==} - engines: {node: '>=14.0.0'} + /@aws-sdk/token-providers@3.709.0(@aws-sdk/client-sso-oidc@3.709.0): + resolution: {integrity: sha512-q5Ar6k71nci43IbULFgC8a89d/3EHpmd7HvBzqVGRcHnoPwh8eZDBfbBXKH83NGwcS1qPSRYiDbVfeWPm4/1jA==} + engines: {node: '>=16.0.0'} + peerDependencies: + '@aws-sdk/client-sso-oidc': ^3.709.0 dependencies: - tslib: 2.5.0 + '@aws-sdk/client-sso-oidc': 3.709.0(@aws-sdk/client-sts@3.709.0) + '@aws-sdk/types': 3.709.0 + '@smithy/property-provider': 3.1.11 + '@smithy/shared-ini-file-loader': 3.1.12 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-retry@3.310.0: - resolution: {integrity: sha512-FwWGhCBLfoivTMUHu1LIn4NjrN9JLJ/aX5aZmbcPIOhZVFJj638j0qDgZXyfvVqBuBZh7M8kGq0Oahy3dp69OA==} - engines: {node: '>= 14.0.0'} + /@aws-sdk/types@3.709.0: + resolution: {integrity: sha512-ArtLTMxgjf13Kfu3gWH3Ez9Q5TkDdcRZUofpKH3pMGB/C6KAbeSCtIIDKfoRTUABzyGlPyCrZdnFjKyH+ypIpg==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/service-error-classification': 3.310.0 - tslib: 2.5.0 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/util-stream-browser@3.310.0: - resolution: {integrity: sha512-bysXZHwFwvbqOTCScCdCnoLk1K3GCo0HRIYEZuL7O7MHrQmfaYRXcaft/p22+GUv9VeFXS/eJJZ5r4u32az94w==} + /@aws-sdk/util-arn-parser@3.693.0: + resolution: {integrity: sha512-WC8x6ca+NRrtpAH64rWu+ryDZI3HuLwlEr8EU6/dbC/pt+r/zC0PBoC15VEygUaBA+isppCikQpGyEDu0Yj7gQ==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/fetch-http-handler': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-base64': 3.310.0 - '@aws-sdk/util-hex-encoding': 3.310.0 - '@aws-sdk/util-utf8': 3.310.0 - tslib: 2.5.0 + tslib: 2.6.2 dev: false - /@aws-sdk/util-stream-node@3.310.0: - resolution: {integrity: sha512-hueAXFK0GVvnfYFgqbF7587xZfMZff5jlIFZOHqx7XVU7bl7qrRUCnphHk8H6yZ7RoQbDPcfmHJgtEoAJg1T1Q==} - engines: {node: '>=14.0.0'} + /@aws-sdk/util-endpoints@3.709.0: + 
resolution: {integrity: sha512-Mbc7AtL5WGCTKC16IGeUTz+sjpC3ptBda2t0CcK0kMVw3THDdcSq6ZlNKO747cNqdbwUvW34oHteUiHv4/z88Q==} + engines: {node: '>=16.0.0'} dependencies: - '@aws-sdk/node-http-handler': 3.310.0 - '@aws-sdk/types': 3.310.0 - '@aws-sdk/util-buffer-from': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/types': 3.709.0 + '@smithy/types': 3.7.2 + '@smithy/util-endpoints': 2.1.7 + tslib: 2.6.2 dev: false - /@aws-sdk/util-uri-escape@3.310.0: - resolution: {integrity: sha512-drzt+aB2qo2LgtDoiy/3sVG8w63cgLkqFIa2NFlGpUgHFWTXkqtbgf4L5QdjRGKWhmZsnqkbtL7vkSWEcYDJ4Q==} + /@aws-sdk/util-locate-window@3.310.0: + resolution: {integrity: sha512-qo2t/vBTnoXpjKxlsC2e1gBrRm80M3bId27r0BRB2VniSSe7bL1mmzM+/HFtujm0iAxtPM+aLEflLJlJeDPg0w==} engines: {node: '>=14.0.0'} dependencies: - tslib: 2.5.0 + tslib: 2.6.2 dev: false - /@aws-sdk/util-user-agent-browser@3.310.0: - resolution: {integrity: sha512-yU/4QnHHuQ5z3vsUqMQVfYLbZGYwpYblPiuZx4Zo9+x0PBkNjYMqctdDcrpoH9Z2xZiDN16AmQGK1tix117ZKw==} + /@aws-sdk/util-user-agent-browser@3.709.0: + resolution: {integrity: sha512-/rL2GasJzdTWUURCQKFldw2wqBtY4k4kCiA2tVZSKg3y4Ey7zO34SW8ebaeCE2/xoWOyLR2/etdKyphoo4Zrtg==} dependencies: - '@aws-sdk/types': 3.310.0 + '@aws-sdk/types': 3.709.0 + '@smithy/types': 3.7.2 bowser: 2.11.0 - tslib: 2.5.0 + tslib: 2.6.2 dev: false - /@aws-sdk/util-user-agent-node@3.310.0: - resolution: {integrity: sha512-Ra3pEl+Gn2BpeE7KiDGpi4zj7WJXZA5GXnGo3mjbi9+Y3zrbuhJAbdZO3mO/o7xDgMC6ph4xCTbaSGzU6b6EDg==} - engines: {node: '>=14.0.0'} + /@aws-sdk/util-user-agent-node@3.709.0: + resolution: {integrity: sha512-trBfzSCVWy7ILgqhEXgiuM7hfRCw4F4a8IK90tjk9YL0jgoJ6eJuOp7+DfCtHJaygoBxD3cdMFkOu+lluFmGBA==} + engines: {node: '>=16.0.0'} peerDependencies: aws-crt: '>=1.0.0' peerDependenciesMeta: aws-crt: optional: true dependencies: - '@aws-sdk/node-config-provider': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/util-utf8-browser@3.259.0: - resolution: {integrity: sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==} - dependencies: - tslib: 2.5.0 - dev: false - - /@aws-sdk/util-utf8@3.310.0: - resolution: {integrity: sha512-DnLfFT8uCO22uOJc0pt0DsSNau1GTisngBCDw8jQuWT5CqogMJu4b/uXmwEqfj8B3GX6Xsz8zOd6JpRlPftQoA==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/util-buffer-from': 3.310.0 - tslib: 2.5.0 - dev: false - - /@aws-sdk/util-waiter@3.310.0: - resolution: {integrity: sha512-AV5j3guH/Y4REu+Qh3eXQU9igljHuU4XjX2sADAgf54C0kkhcCCkkiuzk3IsX089nyJCqIcj5idbjdvpnH88Vw==} - engines: {node: '>=14.0.0'} - dependencies: - '@aws-sdk/abort-controller': 3.310.0 - '@aws-sdk/types': 3.310.0 - tslib: 2.5.0 + '@aws-sdk/middleware-user-agent': 3.709.0 + '@aws-sdk/types': 3.709.0 + '@smithy/node-config-provider': 3.1.12 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false - /@aws-sdk/xml-builder@3.310.0: - resolution: {integrity: sha512-TqELu4mOuSIKQCqj63fGVs86Yh+vBx5nHRpWKNUNhB2nPTpfbziTs5c1X358be3peVWA4wPxW7Nt53KIg1tnNw==} - engines: {node: '>=14.0.0'} + /@aws-sdk/xml-builder@3.709.0: + resolution: {integrity: sha512-2GPCwlNxeHspoK/Mc8nbk9cBOkSpp3j2SJUQmFnyQK6V/pR6II2oPRyZkMomug1Rc10hqlBHByMecq4zhV2uUw==} + engines: {node: '>=16.0.0'} dependencies: - tslib: 2.5.0 + '@smithy/types': 3.7.2 + tslib: 2.6.2 dev: false /@babel/cli@7.21.0(@babel/core@7.21.4): @@ -3300,18 +2972,484 @@ packages: '@sinonjs/commons': 1.8.6 dev: true - /@smithy/abort-controller@1.1.0: - resolution: {integrity: sha512-5imgGUlZL4dW4YWdMYAKLmal9ny/tlenM81QZY7xYyb76z9Z/QOg7oM5Ak9HQl8QfFTlGVWwcMXl+54jroRgEQ==} + 
/@smithy/abort-controller@3.1.9: + resolution: {integrity: sha512-yiW0WI30zj8ZKoSYNx90no7ugVn3khlyH/z5W8qtKBtVE6awRALbhSG+2SAHA1r6bO/6M9utxYKVZ3PCJ1rWxw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/chunked-blob-reader-native@3.0.1: + resolution: {integrity: sha512-VEYtPvh5rs/xlyqpm5NRnfYLZn+q0SRPELbvBV+C/G7IQ+ouTuo+NKKa3ShG5OaFR8NYVMXls9hPYLTvIKKDrQ==} + dependencies: + '@smithy/util-base64': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/chunked-blob-reader@4.0.0: + resolution: {integrity: sha512-jSqRnZvkT4egkq/7b6/QRCNXmmYVcHwnJldqJ3IhVpQE2atObVJ137xmGeuGFhjFUr8gCEVAOKwSY79OvpbDaQ==} + dependencies: + tslib: 2.6.2 + dev: false + + /@smithy/config-resolver@3.0.13: + resolution: {integrity: sha512-Gr/qwzyPaTL1tZcq8WQyHhTZREER5R1Wytmz4WnVGL4onA3dNk6Btll55c8Vr58pLdvWZmtG8oZxJTw3t3q7Jg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/node-config-provider': 3.1.12 + '@smithy/types': 3.7.2 + '@smithy/util-config-provider': 3.0.0 + '@smithy/util-middleware': 3.0.11 + tslib: 2.6.2 + dev: false + + /@smithy/core@2.5.5: + resolution: {integrity: sha512-G8G/sDDhXA7o0bOvkc7bgai6POuSld/+XhNnWAbpQTpLv2OZPvyqQ58tLPPlz0bSNsXktldDDREIv1LczFeNEw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/middleware-serde': 3.0.11 + '@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-middleware': 3.0.11 + '@smithy/util-stream': 3.3.2 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/credential-provider-imds@3.2.8: + resolution: {integrity: sha512-ZCY2yD0BY+K9iMXkkbnjo+08T2h8/34oHd0Jmh6BZUSZwaaGlGCyBT/3wnS7u7Xl33/EEfN4B6nQr3Gx5bYxgw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/node-config-provider': 3.1.12 + '@smithy/property-provider': 3.1.11 + '@smithy/types': 3.7.2 + '@smithy/url-parser': 3.0.11 + tslib: 2.6.2 + dev: false + + /@smithy/eventstream-codec@3.1.10: + resolution: {integrity: sha512-323B8YckSbUH0nMIpXn7HZsAVKHYHFUODa8gG9cHo0ySvA1fr5iWaNT+iIL0UCqUzG6QPHA3BSsBtRQou4mMqQ==} + dependencies: + '@aws-crypto/crc32': 5.2.0 + '@smithy/types': 3.7.2 + '@smithy/util-hex-encoding': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/eventstream-serde-browser@3.0.14: + resolution: {integrity: sha512-kbrt0vjOIihW3V7Cqj1SXQvAI5BR8SnyQYsandva0AOR307cXAc+IhPngxIPslxTLfxwDpNu0HzCAq6g42kCPg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/eventstream-serde-universal': 3.0.13 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/eventstream-serde-config-resolver@3.0.11: + resolution: {integrity: sha512-P2pnEp4n75O+QHjyO7cbw/vsw5l93K/8EWyjNCAAybYwUmj3M+hjSQZ9P5TVdUgEG08ueMAP5R4FkuSkElZ5tQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/eventstream-serde-node@3.0.13: + resolution: {integrity: sha512-zqy/9iwbj8Wysmvi7Lq7XFLeDgjRpTbCfwBhJa8WbrylTAHiAu6oQTwdY7iu2lxigbc9YYr9vPv5SzYny5tCXQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/eventstream-serde-universal': 3.0.13 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/eventstream-serde-universal@3.0.13: + resolution: {integrity: sha512-L1Ib66+gg9uTnqp/18Gz4MDpJPKRE44geOjOQ2SVc0eiaO5l255ADziATZgjQjqumC7yPtp1XnjHlF1srcwjKw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/eventstream-codec': 3.1.10 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/fetch-http-handler@4.1.2: + resolution: {integrity: 
sha512-R7rU7Ae3ItU4rC0c5mB2sP5mJNbCfoDc8I5XlYjIZnquyUwec7fEo78F6DA3SmgJgkU1qTMcZJuGblxZsl10ZA==} + dependencies: + '@smithy/protocol-http': 4.1.8 + '@smithy/querystring-builder': 3.0.11 + '@smithy/types': 3.7.2 + '@smithy/util-base64': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/hash-blob-browser@3.1.10: + resolution: {integrity: sha512-elwslXOoNunmfS0fh55jHggyhccobFkexLYC1ZeZ1xP2BTSrcIBaHV2b4xUQOdctrSNOpMqOZH1r2XzWTEhyfA==} + dependencies: + '@smithy/chunked-blob-reader': 4.0.0 + '@smithy/chunked-blob-reader-native': 3.0.1 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/hash-node@3.0.11: + resolution: {integrity: sha512-emP23rwYyZhQBvklqTtwetkQlqbNYirDiEEwXl2v0GYWMnCzxst7ZaRAnWuy28njp5kAH54lvkdG37MblZzaHA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/hash-stream-node@3.1.10: + resolution: {integrity: sha512-olomK/jZQ93OMayW1zfTHwcbwBdhcZOHsyWyiZ9h9IXvc1mCD/VuvzbLb3Gy/qNJwI4MANPLctTp2BucV2oU/Q==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/invalid-dependency@3.0.11: + resolution: {integrity: sha512-NuQmVPEJjUX6c+UELyVz8kUx8Q539EDeNwbRyu4IIF8MeV7hUtq1FB3SHVyki2u++5XLMFqngeMKk7ccspnNyQ==} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/is-array-buffer@2.2.0: + resolution: {integrity: sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==} + engines: {node: '>=14.0.0'} + dependencies: + tslib: 2.6.2 + dev: false + + /@smithy/is-array-buffer@3.0.0: + resolution: {integrity: sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.6.2 + dev: false + + /@smithy/md5-js@3.0.11: + resolution: {integrity: sha512-3NM0L3i2Zm4bbgG6Ymi9NBcxXhryi3uE8fIfHJZIOfZVxOkGdjdgjR9A06SFIZCfnEIWKXZdm6Yq5/aPXFFhsQ==} + dependencies: + '@smithy/types': 3.7.2 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/middleware-content-length@3.0.13: + resolution: {integrity: sha512-zfMhzojhFpIX3P5ug7jxTjfUcIPcGjcQYzB9t+rv0g1TX7B0QdwONW+ATouaLoD7h7LOw/ZlXfkq4xJ/g2TrIw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/middleware-endpoint@3.2.5: + resolution: {integrity: sha512-VhJNs/s/lyx4weiZdXSloBgoLoS8osV0dKIain8nGmx7of3QFKu5BSdEuk1z/U8x9iwes1i+XCiNusEvuK1ijg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/core': 2.5.5 + '@smithy/middleware-serde': 3.0.11 + '@smithy/node-config-provider': 3.1.12 + '@smithy/shared-ini-file-loader': 3.1.12 + '@smithy/types': 3.7.2 + '@smithy/url-parser': 3.0.11 + '@smithy/util-middleware': 3.0.11 + tslib: 2.6.2 + dev: false + + /@smithy/middleware-retry@3.0.30: + resolution: {integrity: sha512-6323RL2BvAR3VQpTjHpa52kH/iSHyxd/G9ohb2MkBk2Ucu+oMtRXT8yi7KTSIS9nb58aupG6nO0OlXnQOAcvmQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/node-config-provider': 3.1.12 + '@smithy/protocol-http': 4.1.8 + '@smithy/service-error-classification': 3.0.11 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 + '@smithy/util-middleware': 3.0.11 + '@smithy/util-retry': 3.0.11 + tslib: 2.6.2 + uuid: 9.0.1 + dev: false + + /@smithy/middleware-serde@3.0.11: + resolution: {integrity: 
sha512-KzPAeySp/fOoQA82TpnwItvX8BBURecpx6ZMu75EZDkAcnPtO6vf7q4aH5QHs/F1s3/snQaSFbbUMcFFZ086Mw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/middleware-stack@3.0.11: + resolution: {integrity: sha512-1HGo9a6/ikgOMrTrWL/WiN9N8GSVYpuRQO5kjstAq4CvV59bjqnh7TbdXGQ4vxLD3xlSjfBjq5t1SOELePsLnA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/node-config-provider@3.1.12: + resolution: {integrity: sha512-O9LVEu5J/u/FuNlZs+L7Ikn3lz7VB9hb0GtPT9MQeiBmtK8RSY3ULmsZgXhe6VAlgTw0YO+paQx4p8xdbs43vQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/property-provider': 3.1.11 + '@smithy/shared-ini-file-loader': 3.1.12 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/node-http-handler@3.3.2: + resolution: {integrity: sha512-t4ng1DAd527vlxvOfKFYEe6/QFBcsj7WpNlWTyjorwXXcKw3XlltBGbyHfSJ24QT84nF+agDha9tNYpzmSRZPA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/abort-controller': 3.1.9 + '@smithy/protocol-http': 4.1.8 + '@smithy/querystring-builder': 3.0.11 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/property-provider@3.1.11: + resolution: {integrity: sha512-I/+TMc4XTQ3QAjXfOcUWbSS073oOEAxgx4aZy8jHaf8JQnRkq2SZWw8+PfDtBvLUjcGMdxl+YwtzWe6i5uhL/A==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/protocol-http@4.1.8: + resolution: {integrity: sha512-hmgIAVyxw1LySOwkgMIUN0kjN8TG9Nc85LJeEmEE/cNEe2rkHDUWhnJf2gxcSRFLWsyqWsrZGw40ROjUogg+Iw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/querystring-builder@3.0.11: + resolution: {integrity: sha512-u+5HV/9uJaeLj5XTb6+IEF/dokWWkEqJ0XiaRRogyREmKGUgZnNecLucADLdauWFKUNbQfulHFEZEdjwEBjXRg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + '@smithy/util-uri-escape': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/querystring-parser@3.0.11: + resolution: {integrity: sha512-Je3kFvCsFMnso1ilPwA7GtlbPaTixa3WwC+K21kmMZHsBEOZYQaqxcMqeFFoU7/slFjKDIpiiPydvdJm8Q/MCw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/service-error-classification@3.0.11: + resolution: {integrity: sha512-QnYDPkyewrJzCyaeI2Rmp7pDwbUETe+hU8ADkXmgNusO1bgHBH7ovXJiYmba8t0fNfJx75fE8dlM6SEmZxheog==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + dev: false + + /@smithy/shared-ini-file-loader@3.1.12: + resolution: {integrity: sha512-1xKSGI+U9KKdbG2qDvIR9dGrw3CNx+baqJfyr0igKEpjbHL5stsqAesYBzHChYHlelWtb87VnLWlhvfCz13H8Q==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/signature-v4@4.2.4: + resolution: {integrity: sha512-5JWeMQYg81TgU4cG+OexAWdvDTs5JDdbEZx+Qr1iPbvo91QFGzjy0IkXAKaXUHqmKUJgSHK0ZxnCkgZpzkeNTA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/is-array-buffer': 3.0.0 + '@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-middleware': 3.0.11 + '@smithy/util-uri-escape': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/smithy-client@3.5.0: + resolution: {integrity: sha512-Y8FeOa7gbDfCWf7njrkoRATPa5eNLUEjlJS5z5rXatYuGkCb80LbHcu8AQR8qgAZZaNHCLyo2N+pxPsV7l+ivg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/core': 2.5.5 + '@smithy/middleware-endpoint': 3.2.5 + '@smithy/middleware-stack': 3.0.11 + 
'@smithy/protocol-http': 4.1.8 + '@smithy/types': 3.7.2 + '@smithy/util-stream': 3.3.2 + tslib: 2.6.2 + dev: false + + /@smithy/types@3.7.2: + resolution: {integrity: sha512-bNwBYYmN8Eh9RyjS1p2gW6MIhSO2rl7X9QeLM8iTdcGRP+eDiIWDt66c9IysCc22gefKszZv+ubV9qZc7hdESg==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.6.2 + dev: false + + /@smithy/url-parser@3.0.11: + resolution: {integrity: sha512-TmlqXkSk8ZPhfc+SQutjmFr5FjC0av3GZP4B/10caK1SbRwe/v+Wzu/R6xEKxoNqL+8nY18s1byiy6HqPG37Aw==} + dependencies: + '@smithy/querystring-parser': 3.0.11 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/util-base64@3.0.0: + resolution: {integrity: sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/util-body-length-browser@3.0.0: + resolution: {integrity: sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ==} + dependencies: + tslib: 2.6.2 + dev: false + + /@smithy/util-body-length-node@3.0.0: + resolution: {integrity: sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.6.2 + dev: false + + /@smithy/util-buffer-from@2.2.0: + resolution: {integrity: sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==} engines: {node: '>=14.0.0'} dependencies: - '@smithy/types': 1.2.0 + '@smithy/is-array-buffer': 2.2.0 + tslib: 2.6.2 + dev: false + + /@smithy/util-buffer-from@3.0.0: + resolution: {integrity: sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/is-array-buffer': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/util-config-provider@3.0.0: + resolution: {integrity: sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.6.2 + dev: false + + /@smithy/util-defaults-mode-browser@3.0.30: + resolution: {integrity: sha512-nLuGmgfcr0gzm64pqF2UT4SGWVG8UGviAdayDlVzJPNa6Z4lqvpDzdRXmLxtOdEjVlTOEdpZ9dd3ZMMu488mzg==} + engines: {node: '>= 10.0.0'} + dependencies: + '@smithy/property-provider': 3.1.11 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 + bowser: 2.11.0 + tslib: 2.6.2 + dev: false + + /@smithy/util-defaults-mode-node@3.0.30: + resolution: {integrity: sha512-OD63eWoH68vp75mYcfYyuVH+p7Li/mY4sYOROnauDrtObo1cS4uWfsy/zhOTW8F8ZPxQC1ZXZKVxoxvMGUv2Ow==} + engines: {node: '>= 10.0.0'} + dependencies: + '@smithy/config-resolver': 3.0.13 + '@smithy/credential-provider-imds': 3.2.8 + '@smithy/node-config-provider': 3.1.12 + '@smithy/property-provider': 3.1.11 + '@smithy/smithy-client': 3.5.0 + '@smithy/types': 3.7.2 tslib: 2.6.2 dev: false - /@smithy/types@1.2.0: - resolution: {integrity: sha512-z1r00TvBqF3dh4aHhya7nz1HhvCg4TRmw51fjMrh5do3h+ngSstt/yKlNbHeb9QxJmFbmN8KEVSWgb1bRvfEoA==} + /@smithy/util-endpoints@2.1.7: + resolution: {integrity: sha512-tSfcqKcN/Oo2STEYCABVuKgJ76nyyr6skGl9t15hs+YaiU06sgMkN7QYjo0BbVw+KT26zok3IzbdSOksQ4YzVw==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/node-config-provider': 3.1.12 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/util-hex-encoding@3.0.0: + resolution: {integrity: 
sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.6.2 + dev: false + + /@smithy/util-middleware@3.0.11: + resolution: {integrity: sha512-dWpyc1e1R6VoXrwLoLDd57U1z6CwNSdkM69Ie4+6uYh2GC7Vg51Qtan7ITzczuVpqezdDTKJGJB95fFvvjU/ow==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/util-retry@3.0.11: + resolution: {integrity: sha512-hJUC6W7A3DQgaee3Hp9ZFcOxVDZzmBIRBPlUAk8/fSOEl7pE/aX7Dci0JycNOnm9Mfr0KV2XjIlUOcGWXQUdVQ==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/service-error-classification': 3.0.11 + '@smithy/types': 3.7.2 + tslib: 2.6.2 + dev: false + + /@smithy/util-stream@3.3.2: + resolution: {integrity: sha512-sInAqdiVeisUGYAv/FrXpmJ0b4WTFmciTRqzhb7wVuem9BHvhIG7tpiYHLDWrl2stOokNZpTTGqz3mzB2qFwXg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/fetch-http-handler': 4.1.2 + '@smithy/node-http-handler': 3.3.2 + '@smithy/types': 3.7.2 + '@smithy/util-base64': 3.0.0 + '@smithy/util-buffer-from': 3.0.0 + '@smithy/util-hex-encoding': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/util-uri-escape@3.0.0: + resolution: {integrity: sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg==} + engines: {node: '>=16.0.0'} + dependencies: + tslib: 2.6.2 + dev: false + + /@smithy/util-utf8@2.3.0: + resolution: {integrity: sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==} engines: {node: '>=14.0.0'} dependencies: + '@smithy/util-buffer-from': 2.2.0 + tslib: 2.6.2 + dev: false + + /@smithy/util-utf8@3.0.0: + resolution: {integrity: sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/util-buffer-from': 3.0.0 + tslib: 2.6.2 + dev: false + + /@smithy/util-waiter@3.2.0: + resolution: {integrity: sha512-PpjSboaDUE6yl+1qlg3Si57++e84oXdWGbuFUSAciXsVfEZJJJupR2Nb0QuXHiunt2vGR+1PTizOMvnUPaG2Qg==} + engines: {node: '>=16.0.0'} + dependencies: + '@smithy/abort-controller': 3.1.9 + '@smithy/types': 3.7.2 tslib: 2.6.2 dev: false @@ -6131,8 +6269,8 @@ packages: resolution: {integrity: sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w==} dev: false - /fast-xml-parser@4.1.2: - resolution: {integrity: sha512-CDYeykkle1LiA/uqQyNwYpFbyF6Axec6YapmpUP+/RHWIoR1zKjocdvNaTsxCxZzQ6v9MLXaSYm9Qq0thv0DHg==} + /fast-xml-parser@4.4.1: + resolution: {integrity: sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==} hasBin: true dependencies: strnum: 1.0.5 diff --git a/plugin-server/src/cdp/types.ts b/plugin-server/src/cdp/types.ts index 8a675e605e0175..e9d506a7a7823e 100644 --- a/plugin-server/src/cdp/types.ts +++ b/plugin-server/src/cdp/types.ts @@ -272,6 +272,7 @@ export type HogFunctionInputSchemaType = { integration?: string integration_key?: string integration_field?: 'slack_channel' + requiredScopes?: string } export type HogFunctionTypeType = 'destination' | 'email' | 'sms' | 'push' | 'activity' | 'alert' | 'broadcast' diff --git a/plugin-server/src/worker/ingestion/event-pipeline/populateTeamDataStep.ts b/plugin-server/src/worker/ingestion/event-pipeline/populateTeamDataStep.ts index de63033393f462..042eb197122a35 100644 --- a/plugin-server/src/worker/ingestion/event-pipeline/populateTeamDataStep.ts +++ 
b/plugin-server/src/worker/ingestion/event-pipeline/populateTeamDataStep.ts @@ -2,6 +2,7 @@ import { PluginEvent } from '@posthog/plugin-scaffold' import { eventDroppedCounter } from '../../../main/ingestion-queues/metrics' import { PipelineEvent } from '../../../types' +import { sanitizeString } from '../../../utils/db/utils' import { UUID } from '../../../utils/utils' import { captureIngestionWarning } from '../utils' import { tokenOrTeamPresentCounter } from './metrics' @@ -42,6 +43,9 @@ export async function populateTeamDataStep( } else if (event.team_id) { team = await runner.hub.teamManager.fetchTeam(event.team_id) } else if (event.token) { + // HACK: we've had null bytes end up in the token in the ingest pipeline before, for some reason. We should try to + // prevent this generally, but if it happens, we should at least simply fail to look up the team, rather than crashing + event.token = sanitizeString(event.token) team = await runner.hub.teamManager.getTeamByToken(event.token) } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index fceba00894a1fe..a1bda4e655675f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -305,8 +305,8 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.194.6 - version: 1.194.6 + specifier: 1.200.2 + version: 1.200.2 posthog-js-lite: specifier: 3.0.0 version: 3.0.0 @@ -608,6 +609,9 @@ devDependencies: babel-plugin-import: specifier: ^1.13.0 version: 1.13.8 + caniuse-lite: + specifier: ^1.0.30001687 + version: 1.0.30001687 concurrently: specifier: ^5.3.0 version: 5.3.0 @@ -9635,7 +9638,7 @@ packages: postcss: ^8.1.0 dependencies: browserslist: 4.22.2 - caniuse-lite: 1.0.30001568 + caniuse-lite: 1.0.30001687 fraction.js: 4.2.0 normalize-range: 0.1.2 picocolors: 1.0.0 @@ -9651,7 +9654,7 @@ packages: postcss: ^8.1.0 dependencies: browserslist: 4.22.2 - caniuse-lite: 1.0.30001568 + caniuse-lite: 1.0.30001687 fraction.js: 4.3.7 normalize-range: 0.1.2 picocolors: 1.0.0 @@ -10015,7 +10018,7 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true dependencies: - caniuse-lite: 1.0.30001538 + caniuse-lite: 1.0.30001687 electron-to-chromium: 1.4.492 node-releases: 2.0.13 update-browserslist-db: 1.0.11(browserslist@4.21.10) @@ -10025,7 +10028,7 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true dependencies: - caniuse-lite: 1.0.30001568 + caniuse-lite: 1.0.30001687 electron-to-chromium: 1.4.609 node-releases: 2.0.14 update-browserslist-db: 1.0.13(browserslist@4.22.2) @@ -10092,6 +10095,14 @@ packages: write-file-atomic: 3.0.3 dev: true + /call-bind-apply-helpers@1.0.1: + resolution: {integrity: sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==} + engines: {node: '>= 0.4'} + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + dev: true + /call-bind@1.0.2: resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} dependencies: @@ -10115,6 +10126,14 @@ packages: get-intrinsic: 1.2.4 set-function-length: 1.2.2 + /call-bound@1.0.3: + resolution: {integrity: sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind-apply-helpers: 1.0.1 + get-intrinsic: 1.2.6 + dev: true + /callsites@3.1.0: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} @@ -10159,16 
+10178,13 @@ packages: resolution: {integrity: sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==} dependencies: browserslist: 4.22.2 - caniuse-lite: 1.0.30001568 + caniuse-lite: 1.0.30001687 lodash.memoize: 4.1.2 lodash.uniq: 4.5.0 dev: false - /caniuse-lite@1.0.30001538: - resolution: {integrity: sha512-HWJnhnID+0YMtGlzcp3T9drmBJUVDchPJ08tpUGFLs9CYlwWPH2uLgpHn8fND5pCgXVtnGS3H4QR9XLMHVNkHw==} - - /caniuse-lite@1.0.30001568: - resolution: {integrity: sha512-vSUkH84HontZJ88MiNrOau1EBrCqEQYgkC5gIySiDlpsm8sGVrhU7Kx4V6h0tnqaHzIHZv08HlJIwPbL4XL9+A==} + /caniuse-lite@1.0.30001687: + resolution: {integrity: sha512-0S/FDhf4ZiqrTUiQ39dKeUjYRjkv7lOZU1Dgif2rIqrTzX/1wV2hfKu9TOm1IHkdSijfLswxTFzl/cvir+SLSQ==} /case-anything@2.1.10: resolution: {integrity: sha512-JczJwVrCP0jPKh05McyVsuOg6AYosrB9XWZKbQzXeDAm2ClE/PJE/BcrrQrVyGYH7Jg8V/LDupmyL4kFlVsVFQ==} @@ -11783,6 +11799,15 @@ packages: engines: {node: '>=12'} dev: true + /dunder-proto@1.0.0: + resolution: {integrity: sha512-9+Sj30DIu+4KvHqMfLUGLFYL2PkURSYMVXJyXe92nFRvlYq5hBjLEhblKB+vkd/WVlUYMWigiY07T91Fkk0+4A==} + engines: {node: '>= 0.4'} + dependencies: + call-bind-apply-helpers: 1.0.1 + es-errors: 1.3.0 + gopd: 1.2.0 + dev: true + /duplexify@3.7.1: resolution: {integrity: sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==} dependencies: @@ -12003,6 +12028,11 @@ packages: dependencies: get-intrinsic: 1.2.4 + /es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + dev: true + /es-errors@1.3.0: resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} engines: {node: '>= 0.4'} @@ -12046,6 +12076,13 @@ packages: resolution: {integrity: sha512-cXLGjP0c4T3flZJKQSuziYoq7MlT+rnvfZjfp7h+I7K9BNX54kP9nyWvdbwjQ4u1iWbOL4u96fgeZLToQlZC7w==} dev: true + /es-object-atoms@1.0.0: + resolution: {integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==} + engines: {node: '>= 0.4'} + dependencies: + es-errors: 1.3.0 + dev: true + /es-set-tostringtag@2.0.2: resolution: {integrity: sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==} engines: {node: '>= 0.4'} @@ -12281,7 +12318,7 @@ packages: '@mdn/browser-compat-data': 5.3.16 ast-metadata-inferer: 0.8.0 browserslist: 4.21.10 - caniuse-lite: 1.0.30001538 + caniuse-lite: 1.0.30001687 eslint: 8.57.0 find-up: 5.0.0 lodash.memoize: 4.1.2 @@ -13266,6 +13303,22 @@ packages: has-symbols: 1.0.3 hasown: 2.0.2 + /get-intrinsic@1.2.6: + resolution: {integrity: sha512-qxsEs+9A+u85HhllWJJFicJfPDhRmjzoYdl64aMWW9yRIJmSyxdn8IEkuIM530/7T+lv0TIHd8L6Q/ra0tEoeA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind-apply-helpers: 1.0.1 + dunder-proto: 1.0.0 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.0.0 + function-bind: 1.1.2 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.0.0 + dev: true + /get-nonce@1.0.1: resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==} engines: {node: '>=6'} @@ -13500,6 +13553,11 @@ packages: dependencies: get-intrinsic: 1.2.2 + /gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + dev: true + /graceful-fs@4.2.11: resolution: 
{integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ -13575,6 +13633,11 @@ packages: resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} engines: {node: '>= 0.4'} + /has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + dev: true + /has-tostringtag@1.0.0: resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} engines: {node: '>= 0.4'} @@ -14010,7 +14073,7 @@ packages: hogan.js: 3.0.2 htm: 3.1.1 instantsearch-ui-components: 0.3.0 - preact: 10.25.1 + preact: 10.25.2 qs: 6.9.7 search-insights: 2.13.0 dev: false @@ -15933,6 +15996,11 @@ packages: resolution: {integrity: sha512-6qE4B9deFBIa9YSpOc9O0Sgc43zTeVYbgDT5veRKSlB2+ZuHNoVVxA1L/ckMUayV9Ay9y7Z/SZCLcGteW9i7bg==} dev: false + /math-intrinsics@1.0.0: + resolution: {integrity: sha512-4MqMiKP90ybymYvsut0CH2g4XWbfLtmlCkXmtmdcDCxNB+mQcu1w/1+L/VD7vi/PSv7X2JYV7SCcR+jiPXnQtA==} + engines: {node: '>= 0.4'} + dev: true + /mathml-tag-names@2.1.3: resolution: {integrity: sha512-APMBEanjybaPzUrfqU0IMU5I0AswKMH7k8OTLs0vvV4KZpExkTkY87nR/zpbuTPj+gARop7aGUbl11pnDfW6xg==} dev: true @@ -16472,6 +16540,11 @@ packages: resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==} dev: true + /object-inspect@1.13.3: + resolution: {integrity: sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==} + engines: {node: '>= 0.4'} + dev: true + /object-is@1.1.5: resolution: {integrity: sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==} engines: {node: '>= 0.4'} @@ -17829,12 +17902,12 @@ packages: resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==} dev: false - /posthog-js@1.194.6: - resolution: {integrity: sha512-5g5n7FjWLha/QWVTeWeMErGff21v4/V3wYCZ2z8vAbHaCyHkaDBEbuM756jMFBQMsq3HJcDX9mlxi2HhAHxq2A==} + /posthog-js@1.200.2: + resolution: {integrity: sha512-hDdnzn/FWz+lR0qoYn8TJ7UAVzJSH48ceM2rYXrrZZa8EqBKaUKLf1LWK505/s3QVjK972mbF8wjF+pRDSlwOg==} dependencies: core-js: 3.39.0 fflate: 0.4.8 - preact: 10.25.1 + preact: 10.25.2 web-vitals: 4.2.4 dev: false @@ -17842,8 +17915,8 @@ packages: resolution: {integrity: sha512-Q+/tYsFU9r7xoOJ+y/ZTtdVQwTWfzjbiXBDMM/JKUux3+QPP02iUuIoeBQ+Ot6oEDlC+/PGjB/5A3K7KKb7hcw==} dev: false - /preact@10.25.1: - resolution: {integrity: sha512-frxeZV2vhQSohQwJ7FvlqC40ze89+8friponWUFeVEkaCfhC6Eu4V0iND5C9CXz8JLndV07QRDeXzH1+Anz5Og==} + /preact@10.25.2: + resolution: {integrity: sha512-GEts1EH3oMnqdOIeXhlbBSddZ9nrINd070WBOiPO2ous1orrKGUM4SMDbwyjSWD1iMS2dBvaDjAa5qUhz3TXqw==} dev: false /prelude-ls@1.2.1: @@ -18212,7 +18285,7 @@ packages: resolution: {integrity: sha512-EJPeIn0CYrGu+hli1xilKAPXODtJ12T0sP63Ijx2/khC2JtuaN3JyNIpvmnkmaEtha9ocbG4A4cMcr+TvqvwQg==} engines: {node: '>=0.6'} dependencies: - side-channel: 1.0.6 + side-channel: 1.1.0 dev: true /qs@6.9.7: @@ -18447,7 +18520,7 @@ packages: react: '>=15' dependencies: react: 18.2.0 - unlayer-types: 1.169.0 + unlayer-types: 1.182.0 dev: false /react-error-boundary@3.1.4(react@18.2.0): @@ -19614,6 +19687,35 @@ packages: engines: {node: '>=8'} dev: true + /side-channel-list@1.0.0: + resolution: {integrity: 
sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==} + engines: {node: '>= 0.4'} + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.3 + dev: true + + /side-channel-map@1.0.1: + resolution: {integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==} + engines: {node: '>= 0.4'} + dependencies: + call-bound: 1.0.3 + es-errors: 1.3.0 + get-intrinsic: 1.2.6 + object-inspect: 1.13.3 + dev: true + + /side-channel-weakmap@1.0.2: + resolution: {integrity: sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==} + engines: {node: '>= 0.4'} + dependencies: + call-bound: 1.0.3 + es-errors: 1.3.0 + get-intrinsic: 1.2.6 + object-inspect: 1.13.3 + side-channel-map: 1.0.1 + dev: true + /side-channel@1.0.4: resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} dependencies: @@ -19631,6 +19733,17 @@ packages: object-inspect: 1.13.1 dev: true + /side-channel@1.1.0: + resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} + engines: {node: '>= 0.4'} + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.3 + side-channel-list: 1.0.0 + side-channel-map: 1.0.1 + side-channel-weakmap: 1.0.2 + dev: true + /signal-exit@3.0.7: resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} dev: true @@ -20994,8 +21107,8 @@ packages: resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} engines: {node: '>= 10.0.0'} - /unlayer-types@1.169.0: - resolution: {integrity: sha512-BXT4C/MzrYIiiEXHOovtquGehX4IXLYKx3qo4SwYGmqmaGMvZLKvt2rDlO+DKUjVnvGHxgre/tAbUl3I5Gnpxg==} + /unlayer-types@1.182.0: + resolution: {integrity: sha512-x+YSeA7/Wb/znKDtRws8M3Mu6TyKP3d+MddPVX/iUyDPVEOapoPWk0QxjIaNYtWt6troADZdhzgr2EwsZ61HrA==} dev: false /unpipe@1.0.0: diff --git a/posthog/api/decide.py b/posthog/api/decide.py index 8b51dc6aaf7746..98b331de0472df 100644 --- a/posthog/api/decide.py +++ b/posthog/api/decide.py @@ -1,5 +1,5 @@ from random import random -from typing import Union, cast +from typing import Any, Union import structlog from django.conf import settings @@ -14,8 +14,7 @@ from posthog.api.utils import ( get_project_id, get_token, - hostname_in_allowed_url_list, - parse_domain, + on_permitted_recording_domain, ) from posthog.database_healthcheck import DATABASE_FOR_FLAG_MATCHING from posthog.exceptions import ( @@ -30,6 +29,7 @@ from posthog.models.feature_flag import get_all_feature_flags from posthog.models.feature_flag.flag_analytics import increment_request_count from posthog.models.filters.mixins.utils import process_bool +from posthog.models.remote_config import RemoteConfig from posthog.models.utils import execute_with_timeout from posthog.plugins.site import get_decide_site_apps from posthog.utils import ( @@ -46,30 +46,26 @@ ) -def on_permitted_recording_domain(team: Team, request: HttpRequest) -> bool: - origin = parse_domain(request.headers.get("Origin")) - referer = parse_domain(request.headers.get("Referer")) - user_agent = request.META.get("HTTP_USER_AGENT") - - is_authorized_web_client: bool = hostname_in_allowed_url_list( - team.recording_domains, origin - ) or hostname_in_allowed_url_list(team.recording_domains, referer) - # TODO this is a short term fix for beta testers - # TODO we will match on the app identifier in the 
origin instead and allow users to auth those - is_authorized_mobile_client: bool = user_agent is not None and any( - keyword in user_agent - for keyword in ["posthog-android", "posthog-ios", "posthog-react-native", "posthog-flutter"] +def get_base_config(token: str, team: Team, request: HttpRequest, skip_db: bool = False) -> dict: + # Use the new RemoteConfig-based response when the "use_remote_config" query param is set or the token is opted in via settings + use_remote_config = request.GET.get("use_remote_config") == "true" or token in ( + settings.DECIDE_TOKENS_FOR_REMOTE_CONFIG or [] ) - return is_authorized_web_client or is_authorized_mobile_client + if use_remote_config: + response = RemoteConfig.get_config_via_token(token, request=request) + # Add fields for backwards compatibility with the pre-RemoteConfig /decide response + response["isAuthenticated"] = False + response["toolbarParams"] = {} + response["config"] = {"enable_collect_everything": True} + response["surveys"] = True if len(response["surveys"]) > 0 else False -@csrf_exempt -@timed("posthog_cloud_decide_endpoint") -def get_decide(request: HttpRequest): - # handle cors request - if request.method == "OPTIONS": - return cors_response(request, JsonResponse({"status": 1})) + # Remove fields that only exist on the new RemoteConfig response + del response["hasFeatureFlags"] + del response["token"] + + return response response = { "config": {"enable_collect_everything": True}, @@ -81,256 +77,287 @@ def get_decide(request: HttpRequest): "sessionRecording": False, } - if request.method == "POST": - try: - data = load_data_from_request(request) - api_version_string = request.GET.get("v") - # NOTE: This does not support semantic versioning e.g. 2.1.0 - api_version = int(api_version_string) if api_version_string else 1 - except ValueError: - # default value added because of bug in posthog-js 1.19.0 - # see https://sentry.io/organizations/posthog2/issues/2738865125/?project=1899813 - # as a tombstone if the below statsd counter hasn't seen errors for N days - # then it is likely that no clients are running posthog-js 1.19.0 - # and this defaulting could be removed - statsd.incr( - f"posthog_cloud_decide_defaulted_api_version_on_value_error", - tags={"endpoint": "decide", "api_version_string": api_version_string}, - ) - api_version = 2 - except UnspecifiedCompressionFallbackParsingError as error: - # Notably don't capture this exception as it's not caused by buggy behavior, - # it's just a fallback for when we can't parse the request due to a missing header - # that we attempted to kludge by manually setting the compression type to gzip - # If this kludge fails, though all we need to do is return a 400 and move on - return cors_response( - request, - generate_exception_response("decide", f"Malformed request data: {error}", code="malformed_data"), - ) - except RequestParsingError as error: - capture_exception(error) # We still capture this on Sentry to identify actual potential bugs - return cors_response( - request, - generate_exception_response("decide", f"Malformed request data: {error}", code="malformed_data"), ) + response["captureDeadClicks"] = True if team.capture_dead_clicks else False + + capture_network_timing = True if team.capture_performance_opt_in else False + capture_web_vitals = True if team.autocapture_web_vitals_opt_in else False + autocapture_web_vitals_allowed_metrics = None + if capture_web_vitals: + autocapture_web_vitals_allowed_metrics = team.autocapture_web_vitals_allowed_metrics + response["capturePerformance"] = ( + { + "network_timing": capture_network_timing, + "web_vitals": capture_web_vitals, + 
"web_vitals_allowed_metrics": autocapture_web_vitals_allowed_metrics, + } + if capture_network_timing or capture_web_vitals + else False + ) - token = get_token(data, request) - team = Team.objects.get_team_from_cache_or_token(token) - if team is None and token: - project_id = get_project_id(data, request) + response["autocapture_opt_out"] = True if team.autocapture_opt_out else False + response["autocaptureExceptions"] = ( + { + "endpoint": "/e/", + } + if team.autocapture_exceptions_opt_in + else False + ) - if not project_id: - return cors_response( - request, - generate_exception_response( - "decide", - "Project API key invalid. You can find your project API key in PostHog project settings.", - code="invalid_api_key", - type="authentication_error", - status_code=status.HTTP_401_UNAUTHORIZED, - ), - ) + # this not settings.DEBUG check is a lazy workaround because + # NEW_ANALYTICS_CAPTURE_ENDPOINT doesn't currently work in DEBUG mode + if not settings.DEBUG and str(team.id) not in (settings.NEW_ANALYTICS_CAPTURE_EXCLUDED_TEAM_IDS or []): + response["analytics"] = {"endpoint": settings.NEW_ANALYTICS_CAPTURE_ENDPOINT} - user = User.objects.get_from_personal_api_key(token) - if user is None: - return cors_response( - request, - generate_exception_response( - "decide", - "Invalid Personal API key.", - code="invalid_personal_key", - type="authentication_error", - status_code=status.HTTP_401_UNAUTHORIZED, - ), - ) - team = user.teams.get(id=project_id) + if str(team.id) not in (settings.ELEMENT_CHAIN_AS_STRING_EXCLUDED_TEAMS or []): + response["elementsChainAsString"] = True - if team: - if team.id in settings.DECIDE_SHORT_CIRCUITED_TEAM_IDS: - return cors_response( - request, - generate_exception_response( - "decide", - f"Team with ID {team.id} cannot access the /decide endpoint." 
- f"Please contact us at hey@posthog.com", - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - ), - ) + response["sessionRecording"] = _session_recording_config_response(request, team) - token = cast(str, token) # we know it's not None if we found a team - structlog.contextvars.bind_contextvars(team_id=team.id) - - disable_flags = process_bool(data.get("disable_flags")) is True - feature_flags = None - errors = False - if not disable_flags: - distinct_id = data.get("distinct_id") - if distinct_id is None: - return cors_response( - request, - generate_exception_response( - "decide", - "Decide requires a distinct_id.", - code="missing_distinct_id", - type="validation_error", - status_code=status.HTTP_400_BAD_REQUEST, - ), - ) - else: - distinct_id = str(distinct_id) + if settings.DECIDE_SESSION_REPLAY_QUOTA_CHECK: + from ee.billing.quota_limiting import ( + QuotaLimitingCaches, + QuotaResource, + list_limited_team_attributes, + ) - property_overrides = {} - geoip_enabled = process_bool(data.get("geoip_disable")) is False + limited_tokens_recordings = list_limited_team_attributes( + QuotaResource.RECORDINGS, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY + ) - if geoip_enabled: - property_overrides = get_geoip_properties(get_ip_address(request)) + if token in limited_tokens_recordings: + response["quotaLimited"] = ["recordings"] + response["sessionRecording"] = False - all_property_overrides: dict[str, Union[str, int]] = { - **property_overrides, - **(data.get("person_properties") or {}), - } + response["surveys"] = True if team.surveys_opt_in else False + response["heatmaps"] = True if team.heatmaps_opt_in else False + response["defaultIdentifiedOnly"] = True # Support old SDK versions with setting that is now the default - feature_flags, _, feature_flag_payloads, errors = get_all_feature_flags( - team.pk, - distinct_id, - data.get("groups") or {}, - hash_key_override=data.get("$anon_distinct_id"), - property_value_overrides=all_property_overrides, - group_property_value_overrides=(data.get("group_properties") or {}), - ) + site_apps = [] + # errors mean the database is unavailable, bail in this case + if team.inject_web_apps and not skip_db: + try: + with execute_with_timeout(200, DATABASE_FOR_FLAG_MATCHING): + site_apps = get_decide_site_apps(team, using_database=DATABASE_FOR_FLAG_MATCHING) + except Exception: + pass - active_flags = {key: value for key, value in feature_flags.items() if value} + response["siteApps"] = site_apps + + return response - if api_version == 2: - response["featureFlags"] = active_flags - elif api_version >= 3: - # v3 returns all flags, not just active ones, as well as if there was an error computing all flags - response["featureFlags"] = feature_flags - response["errorsWhileComputingFlags"] = errors - response["featureFlagPayloads"] = feature_flag_payloads - else: - # default v1 - response["featureFlags"] = list(active_flags.keys()) - - # metrics for feature flags - team_id_label = label_for_team_id_to_track(team.pk) - FLAG_EVALUATION_COUNTER.labels( - team_id=team_id_label, - errors_computing=errors, - has_hash_key_override=bool(data.get("$anon_distinct_id")), - ).inc() - else: - response["featureFlags"] = {} - response["captureDeadClicks"] = True if team.capture_dead_clicks else False +@csrf_exempt +@timed("posthog_cloud_decide_endpoint") +def get_decide(request: HttpRequest): + # handle cors request + if request.method == "OPTIONS": + return cors_response(request, JsonResponse({"status": 1})) - capture_network_timing = True if team.capture_performance_opt_in 
-            capture_network_timing = True if team.capture_performance_opt_in else False
+    if request.method != "POST":
+        statsd.incr(f"posthog_cloud_raw_endpoint_success", tags={"endpoint": "decide"})
+        return cors_response(
+            request,
+            JsonResponse(
                {
-                    "network_timing": capture_network_timing,
-                    "web_vitals": capture_web_vitals,
-                    "web_vitals_allowed_metrics": autocapture_web_vitals_allowed_metrics,
+                    "config": {"enable_collect_everything": True},
+                    "toolbarParams": {},
+                    "isAuthenticated": False,
+                    # gzip and gzip-js are aliases for the same compression algorithm
+                    "supportedCompression": ["gzip", "gzip-js"],
+                    "featureFlags": [],
+                    "sessionRecording": False,
                }
-                if capture_network_timing or capture_web_vitals
-                else False
+            ),
+        )
+
+    try:
+        data = load_data_from_request(request)
+        api_version_string = request.GET.get("v")
+        # NOTE: This does not support semantic versioning e.g. 2.1.0
+        api_version = int(api_version_string) if api_version_string else 1
+    except ValueError:
+        # default value added because of bug in posthog-js 1.19.0
+        # see https://sentry.io/organizations/posthog2/issues/2738865125/?project=1899813
+        # as a tombstone if the below statsd counter hasn't seen errors for N days
+        # then it is likely that no clients are running posthog-js 1.19.0
+        # and this defaulting could be removed
+        statsd.incr(
+            f"posthog_cloud_decide_defaulted_api_version_on_value_error",
+            tags={"endpoint": "decide", "api_version_string": api_version_string},
+        )
+        api_version = 2
+    except UnspecifiedCompressionFallbackParsingError as error:
+        # Notably don't capture this exception as it's not caused by buggy behavior,
+        # it's just a fallback for when we can't parse the request due to a missing header
+        # that we attempted to kludge by manually setting the compression type to gzip
+        # If this kludge fails, though, all we need to do is return a 400 and move on
+        return cors_response(
+            request,
+            generate_exception_response("decide", f"Malformed request data: {error}", code="malformed_data"),
+        )
+    except RequestParsingError as error:
+        capture_exception(error)  # We still capture this on Sentry to identify actual potential bugs
+        return cors_response(
+            request,
+            generate_exception_response("decide", f"Malformed request data: {error}", code="malformed_data"),
+        )
+
+    token = get_token(data, request)
+    team = Team.objects.get_team_from_cache_or_token(token)
+    if team is None and token:
+        project_id = get_project_id(data, request)
+
+        if not project_id:
+            return cors_response(
+                request,
+                generate_exception_response(
+                    "decide",
+                    "Project API key invalid. You can find your project API key in PostHog project settings.",
+                    code="invalid_api_key",
+                    type="authentication_error",
+                    status_code=status.HTTP_401_UNAUTHORIZED,
+                ),
+            )
-            response["autocapture_opt_out"] = True if team.autocapture_opt_out else False
-            response["autocaptureExceptions"] = (
-                {
-                    "endpoint": "/e/",
-                }
-                if team.autocapture_exceptions_opt_in
-                else False
+        user = User.objects.get_from_personal_api_key(token)
+        if user is None:
+            return cors_response(
+                request,
+                generate_exception_response(
+                    "decide",
+                    "Invalid Personal API key.",
+                    code="invalid_personal_key",
+                    type="authentication_error",
+                    status_code=status.HTTP_401_UNAUTHORIZED,
+                ),
+            )
+        team = user.teams.get(id=project_id)
+
+    if team:
+        if team.id in settings.DECIDE_SHORT_CIRCUITED_TEAM_IDS:
+            return cors_response(
+                request,
+                generate_exception_response(
+                    "decide",
+                    f"Team with ID {team.id} cannot access the /decide endpoint. "
+                    f"Please contact us at hey@posthog.com",
+                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                ),
            )
-            # this not settings.DEBUG check is a lazy workaround because
-            # NEW_ANALYTICS_CAPTURE_ENDPOINT doesn't currently work in DEBUG mode
-            if not settings.DEBUG and str(team.id) not in (settings.NEW_ANALYTICS_CAPTURE_EXCLUDED_TEAM_IDS or []):
-                response["analytics"] = {"endpoint": settings.NEW_ANALYTICS_CAPTURE_ENDPOINT}
+        token = team.api_token
-            if str(team.id) not in (settings.ELEMENT_CHAIN_AS_STRING_EXCLUDED_TEAMS or []):
-                response["elementsChainAsString"] = True
+        structlog.contextvars.bind_contextvars(team_id=team.id)
-            response["sessionRecording"] = _session_recording_config_response(request, team, token)
+        disable_flags = process_bool(data.get("disable_flags")) is True
+        feature_flags = None
+        errors = False
+        flags_response: dict[str, Any] = {}
-            if settings.DECIDE_SESSION_REPLAY_QUOTA_CHECK:
-                from ee.billing.quota_limiting import (
-                    QuotaLimitingCaches,
-                    QuotaResource,
-                    list_limited_team_attributes,
+        if not disable_flags:
+            distinct_id = data.get("distinct_id")
+            if distinct_id is None:
+                return cors_response(
+                    request,
+                    generate_exception_response(
+                        "decide",
+                        "Decide requires a distinct_id.",
+                        code="missing_distinct_id",
+                        type="validation_error",
+                        status_code=status.HTTP_400_BAD_REQUEST,
+                    ),
                )
+            else:
+                distinct_id = str(distinct_id)
-                limited_tokens_recordings = list_limited_team_attributes(
-                    QuotaResource.RECORDINGS, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY
-                )
+            property_overrides = {}
+            geoip_enabled = process_bool(data.get("geoip_disable")) is False
-                if token in limited_tokens_recordings:
-                    response["quotaLimited"] = ["recordings"]
-                    response["sessionRecording"] = False
-
-            response["surveys"] = True if team.surveys_opt_in else False
-            response["heatmaps"] = True if team.heatmaps_opt_in else False
-            try:
-                default_identified_only = team.pk >= int(settings.DEFAULT_IDENTIFIED_ONLY_TEAM_ID_MIN)
-            except Exception:
-                default_identified_only = False
-            response["defaultIdentifiedOnly"] = bool(default_identified_only)
-
-            site_apps = []
-            # errors mean the database is unavailable, bail in this case
-            if team.inject_web_apps and not errors:
-                try:
-                    with execute_with_timeout(200, DATABASE_FOR_FLAG_MATCHING):
-                        site_apps = get_decide_site_apps(team, using_database=DATABASE_FOR_FLAG_MATCHING)
-                except Exception:
-                    pass
-
-            response["siteApps"] = site_apps
-
-            # NOTE: Whenever you add something to decide response, update this test:
-            # `test_decide_doesnt_error_out_when_database_is_down`
-            # which ensures that decide doesn't error out when the database is down
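
For context on the billing block just below (moved by this refactor, not changed): when DECIDE_BILLING_SAMPLING_RATE is set to some rate r, only roughly an r fraction of eligible requests enter the branch, and each one that does is counted as 1/r requests, so the expected total stays unbiased. A standalone sketch of the same arithmetic (sampled_increment is illustrative, not a codebase function):

from random import random

def sampled_increment(rate: float) -> int:
    # Amount to add to the usage counter for one request: 0 if the request is not
    # sampled, otherwise 1/rate so each counted hit stands in for 1/rate requests.
    if rate and random() < rate:
        return int(1 / rate)  # e.g. rate=0.1 -> increment by 10
    return 0

# Over N requests, sum(sampled_increment(0.1) for _ in range(N)) is ~N in expectation.
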
-
-            if feature_flags:
-                # Billing analytics for decide requests with feature flags
-                # Don't count if all requests are for survey targeting flags only.
-                if not all(flag.startswith(SURVEY_TARGETING_FLAG_PREFIX) for flag in feature_flags.keys()):
-                    # Sample no. of decide requests with feature flags
-                    if settings.DECIDE_BILLING_SAMPLING_RATE and random() < settings.DECIDE_BILLING_SAMPLING_RATE:
-                        count = int(1 / settings.DECIDE_BILLING_SAMPLING_RATE)
-                        increment_request_count(team.pk, count)
+            if geoip_enabled:
+                property_overrides = get_geoip_properties(get_ip_address(request))
-        else:
-            # no auth provided
-            return cors_response(
-                request,
-                generate_exception_response(
-                    "decide",
-                    "No project API key provided. You can find your project API key in PostHog project settings.",
-                    code="no_api_key",
-                    type="authentication_error",
-                    status_code=status.HTTP_401_UNAUTHORIZED,
-                ),
+            all_property_overrides: dict[str, Union[str, int]] = {
+                **property_overrides,
+                **(data.get("person_properties") or {}),
+            }
+
+            feature_flags, _, feature_flag_payloads, errors = get_all_feature_flags(
+                team.pk,
+                distinct_id,
+                data.get("groups") or {},
+                hash_key_override=data.get("$anon_distinct_id"),
+                property_value_overrides=all_property_overrides,
+                group_property_value_overrides=(data.get("group_properties") or {}),
            )
+            active_flags = {key: value for key, value in feature_flags.items() if value}
+
+            if api_version == 2:
+                flags_response["featureFlags"] = active_flags
+            elif api_version >= 3:
+                # v3 returns all flags, not just active ones, as well as if there was an error computing all flags
+                flags_response["featureFlags"] = feature_flags
+                flags_response["errorsWhileComputingFlags"] = errors
+                flags_response["featureFlagPayloads"] = feature_flag_payloads
+            else:
+                # default v1
+                flags_response["featureFlags"] = list(active_flags.keys())
+
+            # metrics for feature flags
+            team_id_label = label_for_team_id_to_track(team.pk)
+            FLAG_EVALUATION_COUNTER.labels(
+                team_id=team_id_label,
+                errors_computing=errors,
+                has_hash_key_override=bool(data.get("$anon_distinct_id")),
+            ).inc()
+        else:
+            flags_response["featureFlags"] = {}
+
+        # NOTE: Changed code - everything that isn't feature flags is assembled in get_base_config
+        response = get_base_config(token, team, request, skip_db=errors)
+        response.update(flags_response)
+
+        # NOTE: Whenever you add something to decide response, update this test:
+        # `test_decide_doesnt_error_out_when_database_is_down`
+        # which ensures that decide doesn't error out when the database is down
+
+        if feature_flags:
+            # Billing analytics for decide requests with feature flags
+            # Don't count if all requests are for survey targeting flags only.
+            if not all(flag.startswith(SURVEY_TARGETING_FLAG_PREFIX) for flag in feature_flags.keys()):
+                # Sample no. of decide requests with feature flags
+                if settings.DECIDE_BILLING_SAMPLING_RATE and random() < settings.DECIDE_BILLING_SAMPLING_RATE:
+                    count = int(1 / settings.DECIDE_BILLING_SAMPLING_RATE)
+                    increment_request_count(team.pk, count)
+
+    else:
+        # no auth provided
+        return cors_response(
+            request,
+            generate_exception_response(
+                "decide",
+                "No project API key provided. You can find your project API key in PostHog project settings.",
+                code="no_api_key",
+                type="authentication_error",
+                status_code=status.HTTP_401_UNAUTHORIZED,
+            ),
+        )
+
    statsd.incr(f"posthog_cloud_raw_endpoint_success", tags={"endpoint": "decide"})
    return cors_response(request, JsonResponse(response))


-def _session_recording_config_response(request: HttpRequest, team: Team, token: str) -> bool | dict:
+def _session_recording_domain_not_allowed(team: Team, request: HttpRequest) -> bool:
+    return team.recording_domains and not on_permitted_recording_domain(team.recording_domains, request)
+
+
+def _session_recording_config_response(request: HttpRequest, team: Team) -> bool | dict:
    session_recording_config_response: bool | dict = False

    try:
-        if team.session_recording_opt_in and (
-            on_permitted_recording_domain(team, request) or not team.recording_domains
-        ):
+        if team.session_recording_opt_in and not _session_recording_domain_not_allowed(team, request):
            capture_console_logs = True if team.capture_console_log_opt_in else False
-            sample_rate = team.session_recording_sample_rate or None
+            sample_rate = str(team.session_recording_sample_rate) if team.session_recording_sample_rate else None

            if sample_rate == "1.00":
                sample_rate = None

diff --git a/posthog/api/email_verification.py b/posthog/api/email_verification.py
index 83c12d1dfe1e97..796e4c616d97f1 100644
--- a/posthog/api/email_verification.py
+++ b/posthog/api/email_verification.py
@@ -25,7 +25,9 @@ def _make_hash_value(self, user: AbstractBaseUser, timestamp):
        # Due to type differences between the user model and the token generator, we need to
        # re-fetch the user from the database to get the correct type.
        usable_user: User = User.objects.get(pk=user.pk)
-        return f"{usable_user.pk}{usable_user.email}{usable_user.pending_email}{timestamp}"
+        login_timestamp = "" if user.last_login is None else user.last_login.replace(microsecond=0, tzinfo=None)
+
+        return f"{usable_user.pk}{usable_user.email}{usable_user.pending_email}{login_timestamp}{timestamp}"


email_verification_token_generator = EmailVerificationTokenGenerator()
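
A note on the email_verification.py change above: folding last_login into the hashed value means any outstanding verification link stops validating as soon as the user logs in, which is the same trick Django's built-in PasswordResetTokenGenerator uses for password reset links. A condensed sketch of why the token no longer matches (make_hash is illustrative; the real method also hashes the pending email):

from datetime import datetime

def make_hash(pk: int, email: str, last_login: datetime | None, timestamp: int) -> str:
    login_timestamp = "" if last_login is None else last_login.replace(microsecond=0, tzinfo=None)
    return f"{pk}{email}{login_timestamp}{timestamp}"

issued = make_hash(1, "a@b.com", None, 1000)                          # link generated before any login
checked = make_hash(1, "a@b.com", datetime(2024, 1, 1, 12, 0), 1000)  # user has logged in since
assert issued != checked  # the old link no longer verifies
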
diff --git a/posthog/api/hog_function.py b/posthog/api/hog_function.py
index a382ebda866e09..4549f4f3a8bb5f 100644
--- a/posthog/api/hog_function.py
+++ b/posthog/api/hog_function.py
@@ -107,6 +107,7 @@ class Meta:
            "inputs",
            "filters",
            "masking",
+            "mappings",
            "icon_url",
            "template",
            "template_id",
@@ -127,8 +128,19 @@ class Meta:
            "inputs_schema": {"required": False},
            "template_id": {"write_only": True},
            "deleted": {"write_only": True},
+            "type": {"required": True},
        }

+    def validate_type(self, value):
+        # Ensure it is only set when creating a new function
+        if self.context.get("view") and self.context["view"].action == "create":
+            return value
+
+        instance = cast(Optional[HogFunction], self.context.get("instance", self.instance))
+        if instance and instance.type != value:
+            raise serializers.ValidationError("Cannot modify the type of an existing function")
+        return value
+
    def validate(self, attrs):
        team = self.context["get_team"]()
        attrs["team"] = team
@@ -136,6 +148,8 @@ def validate(self, attrs):
        has_addon = team.organization.is_feature_available(AvailableFeature.DATA_PIPELINES)
        instance = cast(Optional[HogFunction], self.context.get("instance", self.instance))

+        hog_type = attrs.get("type", instance.type if instance else "destination")
+
        if not has_addon:
            template_id = attrs.get("template_id", instance.template_id if instance else None)
            template = HOG_FUNCTION_TEMPLATES_BY_ID.get(template_id, None)
@@ -153,42 +167,51 @@ def validate(self, attrs):
            # Without the addon, they cannot deviate from the template
            attrs["inputs_schema"] = template.inputs_schema
+            attrs["mappings"] = template.mappings
            attrs["hog"] = template.hog

-        if "type" not in attrs:
-            attrs["type"] = "destination"
-
        if self.context.get("view") and self.context["view"].action == "create":
            # Ensure we have sensible defaults when created
            attrs["filters"] = attrs.get("filters") or {}
            attrs["inputs_schema"] = attrs.get("inputs_schema") or []
            attrs["inputs"] = attrs.get("inputs") or {}
+            attrs["mappings"] = attrs.get("mappings") or None
+
+        # Used for both top-level input validation and mappings input validation
+        def validate_input_and_filters(attrs: dict):
+            if "inputs_schema" in attrs:
+                attrs["inputs_schema"] = validate_inputs_schema(attrs["inputs_schema"])
+
+            if "inputs" in attrs:
+                inputs = attrs["inputs"] or {}
+                existing_encrypted_inputs = None
-        if "inputs_schema" in attrs:
-            attrs["inputs_schema"] = validate_inputs_schema(attrs["inputs_schema"])
+                if instance and instance.encrypted_inputs:
+                    existing_encrypted_inputs = instance.encrypted_inputs
-        if "inputs" in attrs:
-            inputs = attrs["inputs"] or {}
-            existing_encrypted_inputs = None
+                attrs["inputs_schema"] = attrs.get("inputs_schema", instance.inputs_schema if instance else [])
+                attrs["inputs"] = validate_inputs(attrs["inputs_schema"], inputs, existing_encrypted_inputs, hog_type)
-            if instance and instance.encrypted_inputs:
-                existing_encrypted_inputs = instance.encrypted_inputs
+            if "filters" in attrs:
+                if hog_type in TYPES_WITH_COMPILED_FILTERS:
+                    attrs["filters"] = compile_filters_bytecode(attrs["filters"], team)
+                elif hog_type in TYPES_WITH_TRANSPILED_FILTERS:
+                    compiler = JavaScriptCompiler()
+                    code = compiler.visit(compile_filters_expr(attrs["filters"], team))
+                    attrs["filters"]["transpiled"] = {"lang": "ts", "code": code, "stl": list(compiler.stl_functions)}
+                    if "bytecode" in attrs["filters"]:
+                        del attrs["filters"]["bytecode"]
-            attrs["inputs_schema"] = attrs.get("inputs_schema", instance.inputs_schema if instance else [])
-            attrs["inputs"] = validate_inputs(attrs["inputs_schema"], inputs, existing_encrypted_inputs, attrs["type"])
+        validate_input_and_filters(attrs)
-        if "filters" in attrs:
-            if attrs["type"] in TYPES_WITH_COMPILED_FILTERS:
-                attrs["filters"] = compile_filters_bytecode(attrs["filters"], team)
-            elif attrs["type"] in TYPES_WITH_TRANSPILED_FILTERS:
-                compiler = JavaScriptCompiler()
-                code = compiler.visit(compile_filters_expr(attrs["filters"], team))
-                attrs["filters"]["transpiled"] = {"lang": "ts", "code": code, "stl": list(compiler.stl_functions)}
-                if "bytecode" in attrs["filters"]:
-                    del attrs["filters"]["bytecode"]
+        if attrs.get("mappings", None) is not None:
+            if hog_type != "site_destination":
+                raise serializers.ValidationError({"mappings": "Mappings are only allowed for site destinations."})
+            for mapping in attrs["mappings"]:
+                validate_input_and_filters(mapping)

        if "hog" in attrs:
-            if attrs["type"] in TYPES_WITH_JAVASCRIPT_SOURCE:
+            if hog_type in TYPES_WITH_JAVASCRIPT_SOURCE:
                try:
                    # Validate transpilation using the model instance
                    attrs["transpiled"] = get_transpiled_function(
@@ -203,7 +226,7 @@ def validate(self, attrs):
                    raise serializers.ValidationError({"hog": "Error in TypeScript code"})
                attrs["bytecode"] = None
            else:
-                attrs["bytecode"] = compile_hog(attrs["hog"])
+                attrs["bytecode"] = compile_hog(attrs["hog"], hog_type)
                attrs["transpiled"] = None
        else:
            attrs["bytecode"] = None
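
The hog_type line near the top of validate gives the type a simple precedence: the request payload wins, then the existing instance's type, then the "destination" default, while validate_type above it rejects any attempt to change the type after creation. A condensed sketch of the combined behavior (resolve_hog_type is illustrative, not a codebase function):

from typing import Optional

def resolve_hog_type(payload: dict, instance_type: Optional[str]) -> str:
    # Payload value, else the existing instance's type, else the default.
    resolved = payload.get("type", instance_type or "destination")
    if instance_type is not None and resolved != instance_type:
        raise ValueError("Cannot modify the type of an existing function")
    return resolved

assert resolve_hog_type({"type": "site_destination"}, None) == "site_destination"  # create
assert resolve_hog_type({}, "destination") == "destination"  # update without type keeps existing
# resolve_hog_type({"type": "site_app"}, "destination") would raise
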
diff --git a/posthog/api/hog_function_template.py b/posthog/api/hog_function_template.py
index 2044affa77075b..38641031167adf 100644
--- a/posthog/api/hog_function_template.py
+++ b/posthog/api/hog_function_template.py
@@ -6,19 +6,36 @@
from rest_framework.exceptions import NotFound

from posthog.cdp.templates import HOG_FUNCTION_TEMPLATES
-from posthog.cdp.templates.hog_function_template import HogFunctionTemplate, HogFunctionSubTemplate
+from posthog.cdp.templates.hog_function_template import (
+    HogFunctionMapping,
+    HogFunctionMappingTemplate,
+    HogFunctionTemplate,
+    HogFunctionSubTemplate,
+)
from rest_framework_dataclasses.serializers import DataclassSerializer

logger = structlog.get_logger(__name__)


+class HogFunctionMappingSerializer(DataclassSerializer):
+    class Meta:
+        dataclass = HogFunctionMapping
+
+
+class HogFunctionMappingTemplateSerializer(DataclassSerializer):
+    class Meta:
+        dataclass = HogFunctionMappingTemplate
+
+
class HogFunctionSubTemplateSerializer(DataclassSerializer):
    class Meta:
        dataclass = HogFunctionSubTemplate


class HogFunctionTemplateSerializer(DataclassSerializer):
+    mapping_templates = HogFunctionMappingTemplateSerializer(many=True, required=False)
+    mappings = HogFunctionMappingSerializer(many=True, required=False)
    sub_templates = HogFunctionSubTemplateSerializer(many=True, required=False)

    class Meta:

diff --git a/posthog/api/remote_config.py b/posthog/api/remote_config.py
index f1a896c2f527bb..ba62347c8a77c8 100644
--- a/posthog/api/remote_config.py
+++ b/posthog/api/remote_config.py
@@ -5,6 +5,14 @@
from posthog.models.remote_config import RemoteConfig


+def add_vary_headers(response):
+    """
+    Add Vary headers for Origin and Referer to responses.
+    """
+    response["Vary"] = "Origin, Referer"
+    return response
+
+
class BaseRemoteConfigAPIView(APIView):
    """
    Base class for RemoteConfig API views.
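
One note on add_vary_headers, which the views in the next hunk wrap around their responses: Vary: Origin, Referer tells shared caches such as CDNs to key cached copies on those request headers, so a config response computed for one origin is not replayed to a different one (relevant here because parts of the config, like session recording authorization, can depend on the requesting domain). A design trade-off worth knowing: assigning response["Vary"] replaces whatever value was already set, while Django's django.utils.cache.patch_vary_headers merges with existing entries. A minimal sketch of the difference, runnable outside a project with a bare settings.configure():

from django.conf import settings

if not settings.configured:
    settings.configure()  # enough to construct bare responses in this sketch

from django.http import HttpResponse
from django.utils.cache import patch_vary_headers

resp = HttpResponse("{}", content_type="application/json")
resp["Vary"] = "Accept-Encoding"  # e.g. set earlier by GZipMiddleware

patch_vary_headers(resp, ("Origin", "Referer"))  # merges with the existing value
assert resp["Vary"] == "Accept-Encoding, Origin, Referer"

resp["Vary"] = "Origin, Referer"  # add_vary_headers-style assignment overwrites instead
assert resp["Vary"] == "Origin, Referer"
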
@@ -23,28 +31,28 @@ def check_token(self, token: str):

class RemoteConfigAPIView(BaseRemoteConfigAPIView):
    def get(self, request, token: str, *args, **kwargs):
        try:
-            resource = RemoteConfig.get_config_via_token(self.check_token(token))
+            resource = RemoteConfig.get_config_via_token(self.check_token(token), request=request)
        except RemoteConfig.DoesNotExist:
            raise Http404()

-        return JsonResponse(resource)
+        return add_vary_headers(JsonResponse(resource))


class RemoteConfigJSAPIView(BaseRemoteConfigAPIView):
    def get(self, request, token: str, *args, **kwargs):
        try:
-            script_content = RemoteConfig.get_config_js_via_token(self.check_token(token))
+            script_content = RemoteConfig.get_config_js_via_token(self.check_token(token), request=request)
        except RemoteConfig.DoesNotExist:
            raise Http404()

-        return HttpResponse(script_content, content_type="application/javascript")
+        return add_vary_headers(HttpResponse(script_content, content_type="application/javascript"))


class RemoteConfigArrayJSAPIView(BaseRemoteConfigAPIView):
    def get(self, request, token: str, *args, **kwargs):
        try:
-            script_content = RemoteConfig.get_array_js_via_token(self.check_token(token))
+            script_content = RemoteConfig.get_array_js_via_token(self.check_token(token), request=request)
        except RemoteConfig.DoesNotExist:
            raise Http404()

-        return HttpResponse(script_content, content_type="application/javascript")
+        return add_vary_headers(HttpResponse(script_content, content_type="application/javascript"))

diff --git a/posthog/api/survey.py b/posthog/api/survey.py
index df100f8717b328..835860bb00906c 100644
--- a/posthog/api/survey.py
+++ b/posthog/api/survey.py
@@ -325,7 +325,7 @@ def validate(self, data):
            if response_sampling_start_date < today_utc:
                raise serializers.ValidationError(
                    {
-                        "response_sampling_start_date": "Response sampling start date must be today or a future date in UTC."
+                        "response_sampling_start_date": f"Response sampling start date must be today or a future date in UTC.
Got {response_sampling_start_date} when current time is {today_utc}" } ) diff --git a/posthog/api/team.py b/posthog/api/team.py index c8ca32abe5ce0a..d2b9ca018dbdfd 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -115,6 +115,7 @@ class Meta: model = Team fields = [ "id", + "project_id", "uuid", "name", "api_token", @@ -141,6 +142,7 @@ class Meta: "heatmaps_opt_in", "capture_dead_clicks", ] + read_only_fields = fields class TeamSerializer(serializers.ModelSerializer, UserPermissionsSerializerMixin, UserAccessControlSerializerMixin): diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index d80afcb5bd4b4b..049ef77b360b50 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -265,6 +265,7 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", "posthog_hogfunction"."template_id", "posthog_team"."id", @@ -635,9 +636,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + 
"posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -880,6 +946,22 @@ AND "posthog_pluginconfig"."team_id" = 99999) ''' # --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.33 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginconfig"."web_token", + "posthog_pluginsourcefile"."updated_at", + "posthog_plugin"."updated_at", + "posthog_pluginconfig"."updated_at" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.4 ''' SELECT "posthog_organizationmembership"."id", @@ -1090,6 +1172,7 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", "posthog_hogfunction"."template_id", "posthog_team"."id", @@ -1521,9 +1604,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + 
"posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1624,6 +1772,47 @@ LIMIT 21 ''' # --- +# name: TestDecide.test_flag_with_behavioural_cohorts.22 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + WHERE (NOT "posthog_cohort"."deleted" + AND "posthog_cohort"."team_id" = 99999) + ''' +# --- +# name: TestDecide.test_flag_with_behavioural_cohorts.23 + ''' + SELECT "posthog_group"."id", + "posthog_group"."team_id", + "posthog_group"."group_key", + "posthog_group"."group_type_index", + "posthog_group"."group_properties", + "posthog_group"."created_at", + "posthog_group"."properties_last_updated_at", + "posthog_group"."properties_last_operation", + "posthog_group"."version" + FROM "posthog_group" + WHERE "posthog_group"."team_id" = 99999 + LIMIT 21 + ''' +# --- # name: TestDecide.test_flag_with_behavioural_cohorts.3 ''' SELECT "posthog_team"."id", @@ -1823,9 +2012,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + 
"posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1954,6 +2208,7 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", "posthog_hogfunction"."template_id", "posthog_team"."id", @@ -2385,11 +2640,76 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" - FROM "posthog_hogfunction" - WHERE ("posthog_hogfunction"."enabled" - AND "posthog_hogfunction"."team_id" = 99999 + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + 
"posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', 'site_app')) ''' @@ -2480,6 +2800,43 @@ AND "posthog_person"."team_id" = 99999) ''' # --- +# name: TestDecide.test_flag_with_regular_cohorts.22 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + WHERE (NOT "posthog_cohort"."deleted" + AND "posthog_cohort"."team_id" = 99999) + ''' +# --- +# name: TestDecide.test_flag_with_regular_cohorts.23 + ''' + SELECT (("posthog_person"."properties" -> '$some_prop_1') = '"something_1"'::jsonb + AND 
"posthog_person"."properties" ? '$some_prop_1' + AND NOT (("posthog_person"."properties" -> '$some_prop_1') = 'null'::jsonb)) AS "flag_X_condition_0" + FROM "posthog_person" + INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") + WHERE ("posthog_persondistinctid"."distinct_id" = 'another_id' + AND "posthog_persondistinctid"."team_id" = 99999 + AND "posthog_person"."team_id" = 99999) + ''' +# --- # name: TestDecide.test_flag_with_regular_cohorts.3 ''' SELECT "posthog_team"."id", @@ -2679,9 +3036,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = 
"posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -2912,9 +3334,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -3211,28 +3698,93 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - 
"posthog_hogfunction"."template_id" - FROM "posthog_hogfunction" - WHERE ("posthog_hogfunction"."enabled" - AND "posthog_hogfunction"."team_id" = 99999 - AND "posthog_hogfunction"."type" IN ('site_destination', - 'site_app')) - ''' -# --- -# name: TestDecide.test_web_app_queries.2 - ''' - SELECT "posthog_pluginconfig"."id", - "posthog_pluginconfig"."web_token", - "posthog_pluginsourcefile"."updated_at", - "posthog_plugin"."updated_at", - "posthog_pluginconfig"."updated_at" - FROM "posthog_pluginconfig" - INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") - INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") - WHERE ("posthog_pluginconfig"."enabled" - AND "posthog_pluginsourcefile"."filename" = 'site.ts' - AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + 
FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecide.test_web_app_queries.2 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginconfig"."web_token", + "posthog_pluginsourcefile"."updated_at", + "posthog_plugin"."updated_at", + "posthog_pluginconfig"."updated_at" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' AND "posthog_pluginconfig"."team_id" = 99999) ''' # --- @@ -3268,6 +3820,38 @@ AND "posthog_pluginconfig"."team_id" = 99999) ''' # --- +# name: TestDecide.test_web_app_queries.22 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginconfig"."web_token", + "posthog_pluginsourcefile"."updated_at", + "posthog_plugin"."updated_at", + "posthog_pluginconfig"."updated_at" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecide.test_web_app_queries.23 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginconfig"."web_token", + "posthog_pluginsourcefile"."updated_at", + "posthog_plugin"."updated_at", + "posthog_pluginconfig"."updated_at" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- # name: TestDecide.test_web_app_queries.3 ''' SELECT "posthog_hogfunction"."id", @@ -3288,6 +3872,7 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", "posthog_hogfunction"."template_id", "posthog_team"."id", @@ -3613,3 +4198,5125 @@ AND "posthog_pluginconfig"."team_id" = 99999) ''' # --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down + ''' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."strapi_id", + "posthog_user"."is_active", + "posthog_user"."theme_mode", + 
"posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."hedgehog_config", + "posthog_user"."events_column_config", + "posthog_user"."email_opt_in" + FROM "posthog_user" + WHERE "posthog_user"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.1 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE "posthog_organizationmembership"."user_id" = 99999 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.10 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE "posthog_organizationmembership"."user_id" = 99999 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.11 + ''' + SELECT "posthog_team"."id", + "posthog_team"."organization_id", + "posthog_team"."access_control" + FROM "posthog_team" + WHERE "posthog_team"."organization_id" IN ('00000000-0000-0000-0000-000000000000'::uuid) + ''' +# --- +# name: 
TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.12 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE ("posthog_organizationmembership"."organization_id" = '00000000-0000-0000-0000-000000000000'::uuid + AND "posthog_organizationmembership"."user_id" = 99999) + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.13 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE "posthog_organizationmembership"."user_id" = 99999 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.14 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + 
"posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE "posthog_organizationmembership"."user_id" = 99999 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.15 + ''' + SELECT "posthog_team"."id", + "posthog_team"."organization_id", + "posthog_team"."access_control" + FROM "posthog_team" + WHERE "posthog_team"."organization_id" IN ('00000000-0000-0000-0000-000000000000'::uuid) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.16 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE ("posthog_organizationmembership"."organization_id" = '00000000-0000-0000-0000-000000000000'::uuid + AND "posthog_organizationmembership"."user_id" = 99999) + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.17 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + 
"posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."filters" @> '{"filter_test_accounts": true}'::jsonb) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.18 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + 
"posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.19 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.2 + ''' + SELECT "posthog_team"."id", + "posthog_team"."organization_id", + "posthog_team"."access_control" + FROM "posthog_team" + WHERE "posthog_team"."organization_id" IN ('00000000-0000-0000-0000-000000000000'::uuid) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.20 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + 
"posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.21 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.22 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + 
"posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.23 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginconfig"."web_token", + "posthog_pluginsourcefile"."updated_at", + "posthog_plugin"."updated_at", + "posthog_pluginconfig"."updated_at" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.24 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.25 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + 
"posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.26 + ''' + SELECT 1 AS "a" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."project_id" = 99999 + LIMIT 1 + ''' +# --- +# name: 
TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.27 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.28 + ''' + SELECT "posthog_productintent"."id", + "posthog_productintent"."team_id", + "posthog_productintent"."created_at", + "posthog_productintent"."updated_at", + "posthog_productintent"."product_type", + "posthog_productintent"."onboarding_completed_at", + "posthog_productintent"."activated_at", + "posthog_productintent"."activation_last_checked_at" + FROM "posthog_productintent" + WHERE "posthog_productintent"."team_id" = 99999 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.29 + ''' + SELECT "posthog_productintent"."product_type", + "posthog_productintent"."created_at", + "posthog_productintent"."onboarding_completed_at", + "posthog_productintent"."updated_at" + FROM "posthog_productintent" + WHERE "posthog_productintent"."team_id" = 99999 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.3 + ''' + SELECT 
"posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.30 + ''' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."strapi_id", + "posthog_user"."is_active", + "posthog_user"."theme_mode", + "posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + 
"posthog_user"."toolbar_mode", + "posthog_user"."hedgehog_config", + "posthog_user"."events_column_config", + "posthog_user"."email_opt_in" + FROM "posthog_user" + WHERE "posthog_user"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.31 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.32 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.33 + ''' + SELECT COUNT(*) AS "__count" 
+ FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.34 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.35 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginconfig"."web_token", + "posthog_pluginsourcefile"."updated_at", + "posthog_plugin"."updated_at", + "posthog_pluginconfig"."updated_at" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = 
"posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.36 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.37 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + 
"posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.38 + ''' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.39 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.4 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + 
"posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE ("posthog_organizationmembership"."organization_id" = '00000000-0000-0000-0000-000000000000'::uuid + AND "posthog_organizationmembership"."user_id" = 99999) + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.40 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.41 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" 
+ AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.42 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.43 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginconfig"."web_token", + "posthog_pluginsourcefile"."updated_at", + "posthog_plugin"."updated_at", + "posthog_pluginconfig"."updated_at" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" 
= 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.44 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.45 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + 
"posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.46 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.47 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id" + FROM "posthog_hogfunction" + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.48 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + 
"posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id" + FROM "posthog_hogfunction" + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.5 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE "posthog_organizationmembership"."user_id" = 99999 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.6 + ''' + SELECT "posthog_team"."id", + "posthog_team"."organization_id", + "posthog_team"."access_control" + FROM "posthog_team" + WHERE "posthog_team"."organization_id" IN ('00000000-0000-0000-0000-000000000000'::uuid) + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.7 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE 
("posthog_organizationmembership"."organization_id" = '00000000-0000-0000-0000-000000000000'::uuid + AND "posthog_organizationmembership"."user_id" = 99999) + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.8 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE ("posthog_organizationmembership"."organization_id" = '00000000-0000-0000-0000-000000000000'::uuid + AND "posthog_organizationmembership"."user_id" = 99999) + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_decide_doesnt_error_out_when_database_is_down.9 + ''' + SELECT "ee_accesscontrol"."id", + "ee_accesscontrol"."team_id", + "ee_accesscontrol"."access_level", + "ee_accesscontrol"."resource", + "ee_accesscontrol"."resource_id", + "ee_accesscontrol"."organization_member_id", + "ee_accesscontrol"."role_id", + "ee_accesscontrol"."created_by_id", + "ee_accesscontrol"."created_at", + "ee_accesscontrol"."updated_at" + FROM "ee_accesscontrol" + LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") + INNER JOIN "posthog_team" ON ("ee_accesscontrol"."team_id" = "posthog_team"."id") + WHERE (("ee_accesscontrol"."organization_member_id" IS NULL + AND "ee_accesscontrol"."resource" = 'project' + AND "ee_accesscontrol"."resource_id" = '99999' + AND "ee_accesscontrol"."role_id" IS NULL + AND "ee_accesscontrol"."team_id" = 99999) + OR ("posthog_organizationmembership"."user_id" = 99999 + AND "ee_accesscontrol"."resource" = 'project' + AND "ee_accesscontrol"."resource_id" = '99999' + AND "ee_accesscontrol"."role_id" IS NULL + AND "ee_accesscontrol"."team_id" = 99999) + OR ("ee_accesscontrol"."organization_member_id" IS NULL + AND "ee_accesscontrol"."resource" = 'project' + AND "ee_accesscontrol"."resource_id" IS NULL + AND "ee_accesscontrol"."role_id" IS NULL + AND "ee_accesscontrol"."team_id" = 99999) + OR ("posthog_organizationmembership"."user_id" = 99999 + AND "ee_accesscontrol"."resource" = 'project' + AND "ee_accesscontrol"."resource_id" IS NULL + AND "ee_accesscontrol"."role_id" IS NULL + AND "ee_accesscontrol"."team_id" = 99999) + OR ("ee_accesscontrol"."organization_member_id" IS NULL + AND "ee_accesscontrol"."resource" = 'project' + AND "ee_accesscontrol"."resource_id" IS NOT NULL + AND 
"ee_accesscontrol"."role_id" IS NULL + AND "posthog_team"."organization_id" = '00000000-0000-0000-0000-000000000000'::uuid) + OR ("posthog_organizationmembership"."user_id" = 99999 + AND "ee_accesscontrol"."resource" = 'project' + AND "ee_accesscontrol"."resource_id" IS NOT NULL + AND "ee_accesscontrol"."role_id" IS NULL + AND "posthog_team"."organization_id" = '00000000-0000-0000-0000-000000000000'::uuid)) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + 
"posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."filters" @> '{"filter_test_accounts": true}'::jsonb) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.1 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.10 + ''' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + 
"posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.11 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.12 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.13 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + 
"posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.14 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.15 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + 
"posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.16 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.17 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + 
"posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.18 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE 
"posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.19 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.2 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.20 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: 
TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.21 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.22 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND 
"posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.23 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM 
"posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.24 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + WHERE (NOT "posthog_cohort"."deleted" + AND "posthog_cohort"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.25 + ''' + SELECT "posthog_group"."id", + "posthog_group"."team_id", + "posthog_group"."group_key", + "posthog_group"."group_type_index", + "posthog_group"."group_properties", + "posthog_group"."created_at", + "posthog_group"."properties_last_updated_at", + "posthog_group"."properties_last_operation", + "posthog_group"."version" + FROM "posthog_group" + WHERE "posthog_group"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.26 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.27 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + 
"posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.28 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.29 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + 
T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.3 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + 
"posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.30 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.31 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + 
"posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.32 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + WHERE (NOT "posthog_cohort"."deleted" + AND "posthog_cohort"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.33 + ''' + SELECT "posthog_group"."id", + "posthog_group"."team_id", + "posthog_group"."group_key", + "posthog_group"."group_type_index", + "posthog_group"."group_properties", + "posthog_group"."created_at", + "posthog_group"."properties_last_updated_at", + "posthog_group"."properties_last_operation", + "posthog_group"."version" + FROM "posthog_group" + WHERE "posthog_group"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.34 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id" + FROM "posthog_hogfunction" + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.35 + ''' + SELECT "posthog_hogfunction"."id", + 
"posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id" + FROM "posthog_hogfunction" + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.36 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + WHERE (NOT "posthog_cohort"."deleted" + AND "posthog_cohort"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.37 + ''' + SELECT "posthog_group"."id", + "posthog_group"."team_id", + "posthog_group"."group_key", + "posthog_group"."group_type_index", + "posthog_group"."group_properties", + "posthog_group"."created_at", + "posthog_group"."properties_last_updated_at", + "posthog_group"."properties_last_operation", + "posthog_group"."version" + FROM "posthog_group" + WHERE "posthog_group"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.4 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.5 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + 
"posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.6 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.7 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + 
"posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.8 + ''' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."strapi_id", + "posthog_user"."is_active", + "posthog_user"."theme_mode", + "posthog_user"."partial_notification_settings", + 
"posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."hedgehog_config", + "posthog_user"."events_column_config", + "posthog_user"."email_opt_in" + FROM "posthog_user" + WHERE "posthog_user"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_behavioural_cohorts.9 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + 
"posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."filters" @> '{"filter_test_accounts": true}'::jsonb) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.1 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + 
"posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.10 + ''' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.11 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + 
"posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.12 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.13 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + 
"posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.14 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.15 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + 
"posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.16 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.17 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + 
"posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.18 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.19 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + 
"posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.2 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.20 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.21 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + 
"posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.22 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.23 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + 
"posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.24 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + 
FROM "posthog_cohort" + WHERE (NOT "posthog_cohort"."deleted" + AND "posthog_cohort"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.25 + ''' + SELECT (("posthog_person"."properties" -> '$some_prop_1') = '"something_1"'::jsonb + AND "posthog_person"."properties" ? '$some_prop_1' + AND NOT (("posthog_person"."properties" -> '$some_prop_1') = 'null'::jsonb)) AS "flag_X_condition_0" + FROM "posthog_person" + INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") + WHERE ("posthog_persondistinctid"."distinct_id" = 'example_id_1' + AND "posthog_persondistinctid"."team_id" = 99999 + AND "posthog_person"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.26 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.27 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + 
"posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.28 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.29 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: 
TestDecideRemoteConfig.test_flag_with_regular_cohorts.3 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.30 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: 
TestDecideRemoteConfig.test_flag_with_regular_cohorts.31 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON 
("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.32 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + WHERE (NOT "posthog_cohort"."deleted" + AND "posthog_cohort"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.33 + ''' + SELECT (("posthog_person"."properties" -> '$some_prop_1') = '"something_1"'::jsonb + AND "posthog_person"."properties" ? '$some_prop_1' + AND NOT (("posthog_person"."properties" -> '$some_prop_1') = 'null'::jsonb)) AS "flag_X_condition_0" + FROM "posthog_person" + INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") + WHERE ("posthog_persondistinctid"."distinct_id" = 'another_id' + AND "posthog_persondistinctid"."team_id" = 99999 + AND "posthog_person"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.34 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id" + FROM "posthog_hogfunction" + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.35 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id" + FROM "posthog_hogfunction" + WHERE 
("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.36 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + WHERE (NOT "posthog_cohort"."deleted" + AND "posthog_cohort"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.37 + ''' + SELECT (("posthog_person"."properties" -> '$some_prop_1') = '"something_1"'::jsonb + AND "posthog_person"."properties" ? '$some_prop_1' + AND NOT (("posthog_person"."properties" -> '$some_prop_1') = 'null'::jsonb)) AS "flag_X_condition_0" + FROM "posthog_person" + INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") + WHERE ("posthog_persondistinctid"."distinct_id" = 'another_id' + AND "posthog_persondistinctid"."team_id" = 99999 + AND "posthog_person"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.4 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.5 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + 
"posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.6 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.7 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + 
"posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.8 + ''' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."strapi_id", + "posthog_user"."is_active", + "posthog_user"."theme_mode", + "posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."hedgehog_config", + "posthog_user"."events_column_config", + "posthog_user"."email_opt_in" + FROM "posthog_user" + WHERE "posthog_user"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestDecideRemoteConfig.test_flag_with_regular_cohorts.9 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + 
"posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr index d14002302aa249..c7820616d0cfd3 100644 --- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr +++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr @@ -366,9 +366,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + 
"posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -565,6 +630,101 @@ LIMIT 21 ''' # --- +# name: TestPreviewList.test_early_access_features.20 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + 
"posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."api_token" = 'token123' + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features.21 + ''' + SELECT "posthog_earlyaccessfeature"."id", + "posthog_earlyaccessfeature"."team_id", + "posthog_earlyaccessfeature"."feature_flag_id", + "posthog_earlyaccessfeature"."name", + "posthog_earlyaccessfeature"."description", + "posthog_earlyaccessfeature"."stage", + "posthog_earlyaccessfeature"."documentation_url", + "posthog_earlyaccessfeature"."created_at", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_earlyaccessfeature" + INNER JOIN "posthog_team" ON ("posthog_earlyaccessfeature"."team_id" = "posthog_team"."id") + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_earlyaccessfeature"."feature_flag_id" = "posthog_featureflag"."id") + WHERE ("posthog_earlyaccessfeature"."stage" = 'beta' + AND "posthog_team"."project_id" = 99999) + ''' +# --- # name: TestPreviewList.test_early_access_features.3 ''' SELECT "posthog_remoteconfig"."id", @@ -776,9 +936,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + 
"posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -808,3 +1033,1140 @@ AND "posthog_featureflag"."team_id" = 99999) ''' # --- +# name: TestPreviewList.test_early_access_features_with_cached_team + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + 
"posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.1 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + 
"posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."api_token" = 'token123' + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.10 + ''' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."strapi_id", + "posthog_user"."is_active", + "posthog_user"."theme_mode", + "posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."hedgehog_config", + "posthog_user"."events_column_config", + "posthog_user"."email_opt_in" + FROM "posthog_user" + WHERE "posthog_user"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.11 + ''' + SELECT "posthog_earlyaccessfeature"."id", + "posthog_earlyaccessfeature"."team_id", + "posthog_earlyaccessfeature"."feature_flag_id", + "posthog_earlyaccessfeature"."name", + "posthog_earlyaccessfeature"."description", + "posthog_earlyaccessfeature"."stage", + "posthog_earlyaccessfeature"."documentation_url", + "posthog_earlyaccessfeature"."created_at", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_earlyaccessfeature" + INNER JOIN "posthog_team" ON ("posthog_earlyaccessfeature"."team_id" = "posthog_team"."id") + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_earlyaccessfeature"."feature_flag_id" = "posthog_featureflag"."id") + WHERE ("posthog_earlyaccessfeature"."stage" = 'beta' + AND "posthog_team"."project_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.12 + ''' + SELECT "posthog_earlyaccessfeature"."id", + "posthog_earlyaccessfeature"."team_id", + "posthog_earlyaccessfeature"."feature_flag_id", + "posthog_earlyaccessfeature"."name", + 
"posthog_earlyaccessfeature"."description", + "posthog_earlyaccessfeature"."stage", + "posthog_earlyaccessfeature"."documentation_url", + "posthog_earlyaccessfeature"."created_at", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_earlyaccessfeature" + INNER JOIN "posthog_team" ON ("posthog_earlyaccessfeature"."team_id" = "posthog_team"."id") + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_earlyaccessfeature"."feature_flag_id" = "posthog_featureflag"."id") + WHERE ("posthog_earlyaccessfeature"."stage" = 'beta' + AND "posthog_team"."project_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.2 + ''' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.3 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + 
"posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.4 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE "posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.5 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + 
"posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.6 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.7 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + "posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN 
"posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.8 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_cached_team.9 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + 
"posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + 
"posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.1 + ''' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.10 + ''' + SELECT "posthog_earlyaccessfeature"."id", + "posthog_earlyaccessfeature"."team_id", + "posthog_earlyaccessfeature"."feature_flag_id", + "posthog_earlyaccessfeature"."name", + "posthog_earlyaccessfeature"."description", + "posthog_earlyaccessfeature"."stage", + "posthog_earlyaccessfeature"."documentation_url", + "posthog_earlyaccessfeature"."created_at", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_earlyaccessfeature" + INNER JOIN "posthog_team" ON ("posthog_earlyaccessfeature"."team_id" = "posthog_team"."id") + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_earlyaccessfeature"."feature_flag_id" = "posthog_featureflag"."id") + WHERE ("posthog_earlyaccessfeature"."stage" = 'beta' + AND "posthog_team"."project_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.11 + ''' + SELECT "posthog_earlyaccessfeature"."id", + "posthog_earlyaccessfeature"."team_id", + "posthog_earlyaccessfeature"."feature_flag_id", + "posthog_earlyaccessfeature"."name", + "posthog_earlyaccessfeature"."description", + "posthog_earlyaccessfeature"."stage", + "posthog_earlyaccessfeature"."documentation_url", + "posthog_earlyaccessfeature"."created_at", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + 
"posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_earlyaccessfeature" + INNER JOIN "posthog_team" ON ("posthog_earlyaccessfeature"."team_id" = "posthog_team"."id") + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_earlyaccessfeature"."feature_flag_id" = "posthog_featureflag"."id") + WHERE ("posthog_earlyaccessfeature"."stage" = 'beta' + AND "posthog_team"."project_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.2 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.3 + ''' + SELECT "posthog_remoteconfig"."id", + "posthog_remoteconfig"."team_id", + "posthog_remoteconfig"."config", + "posthog_remoteconfig"."updated_at", + "posthog_remoteconfig"."synced_at" + FROM "posthog_remoteconfig" + WHERE 
"posthog_remoteconfig"."team_id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.4 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.5 + ''' + SELECT COUNT(*) AS "__count" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.6 + ''' + SELECT "posthog_survey"."id", + "posthog_survey"."team_id", + "posthog_survey"."name", + "posthog_survey"."description", + "posthog_survey"."linked_flag_id", + "posthog_survey"."targeting_flag_id", + "posthog_survey"."internal_targeting_flag_id", + 
"posthog_survey"."internal_response_sampling_flag_id", + "posthog_survey"."type", + "posthog_survey"."conditions", + "posthog_survey"."questions", + "posthog_survey"."appearance", + "posthog_survey"."created_at", + "posthog_survey"."created_by_id", + "posthog_survey"."start_date", + "posthog_survey"."end_date", + "posthog_survey"."updated_at", + "posthog_survey"."archived", + "posthog_survey"."responses_limit", + "posthog_survey"."response_sampling_start_date", + "posthog_survey"."response_sampling_interval_type", + "posthog_survey"."response_sampling_interval", + "posthog_survey"."response_sampling_limit", + "posthog_survey"."response_sampling_daily_limits", + "posthog_survey"."iteration_count", + "posthog_survey"."iteration_frequency_days", + "posthog_survey"."iteration_start_dates", + "posthog_survey"."current_iteration", + "posthog_survey"."current_iteration_start_date", + "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics", + T4."id", + T4."key", + T4."name", + T4."filters", + T4."rollout_percentage", + T4."team_id", + T4."created_by_id", + T4."created_at", + T4."deleted", + T4."active", + T4."rollback_conditions", + T4."performed_rollback", + T4."ensure_experience_continuity", + T4."usage_dashboard_id", + T4."has_enriched_analytics", + T5."id", + T5."key", + T5."name", + T5."filters", + T5."rollout_percentage", + T5."team_id", + T5."created_by_id", + T5."created_at", + T5."deleted", + T5."active", + T5."rollback_conditions", + T5."performed_rollback", + T5."ensure_experience_continuity", + T5."usage_dashboard_id", + T5."has_enriched_analytics" + FROM "posthog_survey" + LEFT OUTER JOIN "posthog_featureflag" ON ("posthog_survey"."linked_flag_id" = "posthog_featureflag"."id") + LEFT OUTER JOIN "posthog_featureflag" T4 ON ("posthog_survey"."targeting_flag_id" = T4."id") + LEFT OUTER JOIN "posthog_featureflag" T5 ON ("posthog_survey"."internal_targeting_flag_id" = T5."id") + WHERE ("posthog_survey"."team_id" = 99999 + AND NOT ("posthog_survey"."archived")) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.7 + ''' + SELECT "posthog_pluginconfig"."id", + "posthog_pluginsourcefile"."transpiled", + "posthog_pluginconfig"."web_token", + "posthog_plugin"."config_schema", + "posthog_pluginconfig"."config" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 99999) + ''' +# --- +# name: TestPreviewList.test_early_access_features_with_pre_env_cached_team.8 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + 
"posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."type", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- +# name: 
TestPreviewList.test_early_access_features_with_pre_env_cached_team.9 + ''' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."strapi_id", + "posthog_user"."is_active", + "posthog_user"."theme_mode", + "posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."hedgehog_config", + "posthog_user"."events_column_config", + "posthog_user"."email_opt_in" + FROM "posthog_user" + WHERE "posthog_user"."id" = 99999 + LIMIT 21 + ''' +# --- diff --git a/posthog/api/test/__snapshots__/test_event.ambr b/posthog/api/test/__snapshots__/test_event.ambr index c5e560c8bdba2f..6ad9d56dc606cf 100644 --- a/posthog/api/test/__snapshots__/test_event.ambr +++ b/posthog/api/test/__snapshots__/test_event.ambr @@ -1,14 +1,14 @@ # serializer version: 1 # name: TestEvents.test_event_property_values ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age + /* user_id:0 request:_snapshot_ */ + SELECT DISTINCT replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + WHERE team_id = 99999 + AND JSONHas(properties, 'random_prop') + AND timestamp >= '2020-01-13 00:00:00' + AND timestamp <= '2020-01-20 23:59:59' + LIMIT 10 ''' # --- # name: TestEvents.test_event_property_values.1 @@ -20,6 +20,8 @@ AND JSONHas(properties, 'random_prop') AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' + AND replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') ILIKE '%qw%' + order by length(replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '')) LIMIT 10 ''' # --- @@ -32,7 +34,7 @@ AND JSONHas(properties, 'random_prop') AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' - AND replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') ILIKE '%qw%' + AND replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') ILIKE '%QW%' order by length(replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '')) LIMIT 10 ''' @@ -46,7 +48,7 @@ AND JSONHas(properties, 'random_prop') AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' - AND replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') ILIKE '%QW%' + AND replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') ILIKE '%6%' order by length(replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '')) LIMIT 10 ''' @@ -60,6 +62,7 @@ AND JSONHas(properties, 'random_prop') AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' + AND (event = 'random event') AND replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') ILIKE '%6%' order by length(replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '')) LIMIT 10 @@ -74,7 +77,8 @@ AND JSONHas(properties, 'random_prop') AND timestamp 
>= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' - AND (event = 'random event') + AND (event = 'foo' + OR event = 'random event') AND replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') ILIKE '%6%' order by length(replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '')) LIMIT 10 @@ -89,9 +93,8 @@ AND JSONHas(properties, 'random_prop') AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' - AND (event = 'foo' - OR event = 'random event') - AND replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') ILIKE '%6%' + AND (event = '404_i_dont_exist') + AND replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') ILIKE '%qw%' order by length(replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '')) LIMIT 10 ''' @@ -113,14 +116,14 @@ # --- # name: TestEvents.test_event_property_values_materialized ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age + /* user_id:0 request:_snapshot_ */ + SELECT DISTINCT "mat_random_prop" FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + WHERE team_id = 99999 + AND notEmpty("mat_random_prop") + AND timestamp >= '2020-01-13 00:00:00' + AND timestamp <= '2020-01-20 23:59:59' + LIMIT 10 ''' # --- # name: TestEvents.test_event_property_values_materialized.1 @@ -132,6 +135,8 @@ AND notEmpty("mat_random_prop") AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' + AND "mat_random_prop" ILIKE '%qw%' + order by length("mat_random_prop") LIMIT 10 ''' # --- @@ -144,7 +149,7 @@ AND notEmpty("mat_random_prop") AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' - AND "mat_random_prop" ILIKE '%qw%' + AND "mat_random_prop" ILIKE '%QW%' order by length("mat_random_prop") LIMIT 10 ''' @@ -158,7 +163,7 @@ AND notEmpty("mat_random_prop") AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' - AND "mat_random_prop" ILIKE '%QW%' + AND "mat_random_prop" ILIKE '%6%' order by length("mat_random_prop") LIMIT 10 ''' @@ -172,6 +177,7 @@ AND notEmpty("mat_random_prop") AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' + AND (event = 'random event') AND "mat_random_prop" ILIKE '%6%' order by length("mat_random_prop") LIMIT 10 @@ -186,7 +192,8 @@ AND notEmpty("mat_random_prop") AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' - AND (event = 'random event') + AND (event = 'foo' + OR event = 'random event') AND "mat_random_prop" ILIKE '%6%' order by length("mat_random_prop") LIMIT 10 @@ -201,9 +208,8 @@ AND notEmpty("mat_random_prop") AND timestamp >= '2020-01-13 00:00:00' AND timestamp <= '2020-01-20 23:59:59' - AND (event = 'foo' - OR event = 'random event') - AND "mat_random_prop" ILIKE '%6%' + AND (event = '404_i_dont_exist') + AND "mat_random_prop" ILIKE '%qw%' order by length("mat_random_prop") LIMIT 10 ''' diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index 4f5f8483b7bbb8..5b8721a2cd48fa 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -354,9 +354,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + 
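Circling back to the test_event.ambr hunks above: the recurring expression replaceRegexpAll(JSONExtractRaw(properties, 'random_prop'), '^"|"$', '') works because JSONExtractRaw returns the raw JSON token (string values keep their surrounding double quotes) and the regex strips exactly one leading and one trailing quote. A hedged Python equivalent, for intuition only:

    import json
    import re

    def property_value(properties_json: str, key: str) -> str:
        raw = json.dumps(json.loads(properties_json)[key])  # raw token, quotes kept
        return re.sub(r'^"|"$', "", raw)  # mirrors replaceRegexpAll(..., '^"|"$', '')

    assert property_value('{"random_prop": "qwerty"}', "random_prop") == "qwerty"
    assert property_value('{"random_prop": 6}', "random_prop") == "6"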
"posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1593,9 +1658,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + 
"posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1906,24 +2036,61 @@ # --- # name: TestOrganizationFeatureFlagCopy.test_copy_feature_flag_create_new.53 ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 + SELECT "posthog_dashboard"."id", + "posthog_dashboard"."name", + "posthog_dashboard"."description", + "posthog_dashboard"."team_id", + "posthog_dashboard"."pinned", + "posthog_dashboard"."created_at", + "posthog_dashboard"."created_by_id", + "posthog_dashboard"."deleted", + "posthog_dashboard"."last_accessed_at", + "posthog_dashboard"."filters", + "posthog_dashboard"."variables", + "posthog_dashboard"."creation_mode", + 
"posthog_dashboard"."restriction_level", + "posthog_dashboard"."deprecated_tags", + "posthog_dashboard"."tags", + "posthog_dashboard"."share_token", + "posthog_dashboard"."is_shared" + FROM "posthog_dashboard" + INNER JOIN "posthog_featureflagdashboards" ON ("posthog_dashboard"."id" = "posthog_featureflagdashboards"."dashboard_id") + WHERE (NOT ("posthog_dashboard"."deleted") + AND "posthog_featureflagdashboards"."feature_flag_id" = 99999) ''' # --- # name: TestOrganizationFeatureFlagCopy.test_copy_feature_flag_create_new.54 ''' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."logo_media_id", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE ("posthog_organizationmembership"."organization_id" = '00000000-0000-0000-0000-000000000000'::uuid + AND "posthog_organizationmembership"."user_id" = 99999) + LIMIT 21 ''' # --- # name: TestOrganizationFeatureFlagCopy.test_copy_feature_flag_create_new.55 diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr index 3d5e9328d39613..f4e08a30e16220 100644 --- a/posthog/api/test/__snapshots__/test_survey.ambr +++ b/posthog/api/test/__snapshots__/test_survey.ambr @@ -397,9 +397,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + 
"posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -740,9 +805,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + 
"posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1141,9 +1271,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + 
"posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1470,9 +1665,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + 
"posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1828,9 +2088,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + 
"posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', diff --git a/posthog/api/test/dashboards/test_dashboard.py b/posthog/api/test/dashboards/test_dashboard.py index ee802318db349f..63f40588999d25 100644 --- a/posthog/api/test/dashboards/test_dashboard.py +++ b/posthog/api/test/dashboards/test_dashboard.py @@ -1313,7 +1313,7 @@ def test_create_from_template_json_can_provide_query_tile(self) -> None: "effective_privilege_level": 37, "effective_restriction_level": 21, "favorited": False, - "filters": {"filter_test_accounts": True}, + "filters": {}, "filters_hash": ANY, "hasMore": None, "id": ANY, diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index fc387e21b47109..bbf74ee1ecb72b 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -5,6 +5,7 @@ from typing import Optional from unittest.mock import patch, Mock +from inline_snapshot import snapshot import pytest from django.conf import settings from django.core.cache import cache @@ -42,6 +43,7 @@ from posthog.models.person import PersonDistinctId from posthog.models.personal_api_key import hash_key_value from posthog.models.plugin import sync_team_inject_web_apps +from posthog.models.remote_config import RemoteConfig from posthog.models.team.team import Team from posthog.models.user import User from posthog.models.utils import generate_random_token_personal @@ -86,6 +88,8 @@ class TestDecide(BaseTest, QueryMatchingTest): We use Django's base test class instead of DRF's because we need granular control over the Content-Type sent over. 
""" + use_remote_config = False + def setUp(self, *args): cache.clear() @@ -113,27 +117,49 @@ def _post_decide( ip="127.0.0.1", disable_flags=False, user_agent: Optional[str] = None, + assert_num_queries: Optional[int] = None, + simulate_database_timeout: bool = False, ): + if self.use_remote_config: + # We test a lot with settings changes so the idea is to refresh the remote config + remote_config = RemoteConfig.objects.get(team=self.team) + remote_config.sync() + if groups is None: groups = {} - return self.client.post( - f"/decide/?v={api_version}", - { - "data": self._dict_to_b64( - data - or { - "token": self.team.api_token, - "distinct_id": distinct_id, - "groups": groups, - "geoip_disable": geoip_disable, - "disable_flags": disable_flags, - }, - ) - }, - HTTP_ORIGIN=origin, - REMOTE_ADDR=ip, - HTTP_USER_AGENT=user_agent or "PostHog test", - ) + + def do_request(): + url = f"/decide/?v={api_version}" + if self.use_remote_config: + url += "&use_remote_config=true" + return self.client.post( + url, + { + "data": self._dict_to_b64( + data + or { + "token": self.team.api_token, + "distinct_id": distinct_id, + "groups": groups, + "geoip_disable": geoip_disable, + "disable_flags": disable_flags, + }, + ) + }, + HTTP_ORIGIN=origin, + REMOTE_ADDR=ip, + HTTP_USER_AGENT=user_agent or "PostHog test", + ) + + if simulate_database_timeout: + with connection.execute_wrapper(QueryTimeoutWrapper()): + return do_request() + + if assert_num_queries: + with self.assertNumQueries(assert_num_queries): + return do_request() + else: + return do_request() def _update_team(self, data, expected_status_code: int = status.HTTP_200_OK): # use a non-csrf client to make requests @@ -547,7 +573,7 @@ def test_web_vitals_autocapture_allowed_metrics(self, *args): {"web_vitals": True, "network_timing": True, "web_vitals_allowed_metrics": ["CLS", "FCP"]}, ) - def test_user_session_recording_opt_in_wildcard_domain(self, *args): + def test_user_session_recording_domain_opt_in_wildcard(self, *args): # :TRICKY: Test for regression around caching response = self._post_decide().json() self.assertEqual(response["sessionRecording"], False) @@ -567,7 +593,7 @@ def test_user_session_recording_opt_in_wildcard_domain(self, *args): response = self._post_decide(origin="https://random.example.com.evilsite.com").json() self.assertEqual(response["sessionRecording"], False) - def test_user_session_recording_evil_site(self, *args): + def test_user_session_recording_domain_not_allowed(self, *args): self._update_team( { "session_recording_opt_in": True, @@ -642,9 +668,8 @@ def test_user_session_recording_allowed_when_permitted_domains_are_not_http_base @snapshot_postgres_queries def test_web_app_queries(self, *args): - with self.assertNumQueries(2): - response = self._post_decide() - self.assertEqual(response.status_code, status.HTTP_200_OK) + response = self._post_decide(assert_num_queries=2) + self.assertEqual(response.status_code, status.HTTP_200_OK) plugin = Plugin.objects.create(organization=self.team.organization, name="My Plugin", plugin_type="source") PluginSourceFile.objects.create( @@ -666,11 +691,10 @@ def test_web_app_queries(self, *args): # caching flag definitions in the above mean fewer queries # 3 of these queries are just for setting transaction scope - with self.assertNumQueries(4): - response = self._post_decide() - self.assertEqual(response.status_code, status.HTTP_200_OK) - injected = response.json()["siteApps"] - self.assertEqual(len(injected), 1) + response = self._post_decide(assert_num_queries=0 if 
self.use_remote_config else 4) + self.assertEqual(response.status_code, status.HTTP_200_OK) + injected = response.json()["siteApps"] + self.assertEqual(len(injected), 1) def test_site_app_injection(self, *args): plugin = Plugin.objects.create(organization=self.team.organization, name="My Plugin", plugin_type="source") @@ -691,12 +715,11 @@ def test_site_app_injection(self, *args): ) self.team.refresh_from_db() self.assertTrue(self.team.inject_web_apps) - with self.assertNumQueries(5): - response = self._post_decide() - self.assertEqual(response.status_code, status.HTTP_200_OK) - injected = response.json()["siteApps"] - self.assertEqual(len(injected), 1) - self.assertTrue(injected[0]["url"].startswith(f"/site_app/{plugin_config.id}/{plugin_config.web_token}/")) + response = self._post_decide(assert_num_queries=1 if self.use_remote_config else 5) + self.assertEqual(response.status_code, status.HTTP_200_OK) + injected = response.json()["siteApps"] + self.assertEqual(len(injected), 1) + self.assertTrue(injected[0]["url"].startswith(f"/site_app/{plugin_config.id}/{plugin_config.web_token}/")) def test_feature_flags(self, *args): self.team.app_urls = ["https://example.com"] @@ -751,17 +774,15 @@ def test_feature_flags(self, *args): created_by=self.user, ) - with self.assertNumQueries(4): - response = self._post_decide() - self.assertEqual(response.status_code, status.HTTP_200_OK) + response = self._post_decide(assert_num_queries=4) + self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn("default-flag", response.json()["featureFlags"]) self.assertIn("beta-feature", response.json()["featureFlags"]) self.assertIn("filer-by-property-2", response.json()["featureFlags"]) # caching flag definitions in the above query mean fewer queries - with self.assertNumQueries(4): - response = self._post_decide({"token": self.team.api_token, "distinct_id": "another_id"}) - self.assertEqual(response.status_code, status.HTTP_200_OK) + response = self._post_decide({"token": self.team.api_token, "distinct_id": "another_id"}, assert_num_queries=4) + self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json()["featureFlags"], ["default-flag"]) def test_feature_flags_v3_json(self, *args): @@ -796,9 +817,8 @@ def test_feature_flags_v3_json(self, *args): created_by=self.user, ) - with self.assertNumQueries(4): - response = self._post_decide(api_version=3) - self.assertEqual(response.status_code, status.HTTP_200_OK) + response = self._post_decide(api_version=3, assert_num_queries=4) + self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( {"color": "blue"}, @@ -859,20 +879,18 @@ def test_feature_flags_v3_json_multivariate(self, *args): ) # caching flag definitions mean fewer queries - with self.assertNumQueries(0): - response = self._post_decide(api_version=2) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertIn("beta-feature", response.json()["featureFlags"]) - self.assertEqual("first-variant", response.json()["featureFlags"]["multivariate-flag"]) + response = self._post_decide(api_version=2, assert_num_queries=0) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("beta-feature", response.json()["featureFlags"]) + self.assertEqual("first-variant", response.json()["featureFlags"]["multivariate-flag"]) - with self.assertNumQueries(0): - response = self._post_decide(api_version=3) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual("first-variant", 
response.json()["featureFlags"]["multivariate-flag"]) - self.assertEqual( - {"color": "blue"}, - response.json()["featureFlagPayloads"]["multivariate-flag"], - ) + response = self._post_decide(api_version=3, assert_num_queries=0) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual("first-variant", response.json()["featureFlags"]["multivariate-flag"]) + self.assertEqual( + {"color": "blue"}, + response.json()["featureFlagPayloads"]["multivariate-flag"], + ) def test_feature_flags_v2(self, *args): self.team.app_urls = ["https://example.com"] @@ -926,28 +944,25 @@ def test_feature_flags_v2(self, *args): created_by=self.user, ) - with self.assertNumQueries(0): - response = self._post_decide(api_version=1) # v1 functionality should not break - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertIn("beta-feature", response.json()["featureFlags"]) - self.assertIn("default-flag", response.json()["featureFlags"]) + response = self._post_decide(api_version=1, assert_num_queries=0) # v1 functionality should not break + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertIn("beta-feature", response.json()["featureFlags"]) + self.assertIn("default-flag", response.json()["featureFlags"]) # caching flag definitions in the above query mean fewer queries - with self.assertNumQueries(0): - response = self._post_decide(api_version=2) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # assigned by distinct_id hash + response = self._post_decide(api_version=2, assert_num_queries=0) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # assigned by distinct_id hash - with self.assertNumQueries(0): - response = self._post_decide(api_version=2, distinct_id="other_id") - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "third-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # different hash, different variant assigned + response = self._post_decide(api_version=2, distinct_id="other_id", assert_num_queries=0) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "third-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # different hash, different variant assigned def test_feature_flags_v2_with_property_overrides(self, *args): self.team.app_urls = ["https://example.com"] @@ -1023,16 +1038,14 @@ def test_feature_flags_v2_with_property_overrides(self, *args): created_by=self.user, ) - with self.assertNumQueries(0): - response = self._post_decide(api_version=2, ip=australia_ip) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue("multivariate-flag" not in response.json()["featureFlags"]) + response = self._post_decide(api_version=2, ip=australia_ip, assert_num_queries=0) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue("multivariate-flag" not in response.json()["featureFlags"]) # caching flag definitions in the above mean fewer queries - with self.assertNumQueries(0): - response = 
self._post_decide(api_version=2, distinct_id="other_id", ip=australia_ip) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue("multivariate-flag" not in response.json()["featureFlags"]) + response = self._post_decide(api_version=2, distinct_id="other_id", ip=australia_ip, assert_num_queries=0) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue("multivariate-flag" not in response.json()["featureFlags"]) def test_feature_flags_v2_with_geoip_error(self, *args): self.team.app_urls = ["https://example.com"] @@ -1107,16 +1120,13 @@ def test_feature_flags_v2_with_geoip_error(self, *args): ) # caching flag definitions mean fewer queries - with self.assertNumQueries(4): - # One to compute properties for all flags - response = self._post_decide(api_version=2, distinct_id="example_id") - self.assertTrue("beta-feature" not in response.json()["featureFlags"]) - self.assertEqual("first-variant", response.json()["featureFlags"]["multivariate-flag"]) + response = self._post_decide(api_version=2, distinct_id="example_id", assert_num_queries=4) + self.assertTrue("beta-feature" not in response.json()["featureFlags"]) + self.assertEqual("first-variant", response.json()["featureFlags"]["multivariate-flag"]) - with self.assertNumQueries(4): - response = self._post_decide(api_version=2, distinct_id="other_id") - self.assertTrue("beta-feature" not in response.json()["featureFlags"]) - self.assertTrue("multivariate-flag" not in response.json()["featureFlags"]) + response = self._post_decide(api_version=2, distinct_id="other_id", assert_num_queries=4) + self.assertTrue("beta-feature" not in response.json()["featureFlags"]) + self.assertTrue("multivariate-flag" not in response.json()["featureFlags"]) def test_feature_flags_v2_consistent_flags(self, *args): self.team.app_urls = ["https://example.com"] @@ -1173,33 +1183,32 @@ def test_feature_flags_v2_consistent_flags(self, *args): ) # caching flag definitions mean fewer queries - with self.assertNumQueries(5): - response = self._post_decide(api_version=2) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # assigned by distinct_id hash + response = self._post_decide(api_version=2, assert_num_queries=5) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # assigned by distinct_id hash # new person, merged from old distinct ID # person.delete() # person2 = Person.objects.create(team=self.team, distinct_ids=["example_id", "other_id"], properties={"email": "tim@posthog.com"}) person.add_distinct_id("other_id") - with self.assertNumQueries(13): - response = self._post_decide( - api_version=2, - data={ - "token": self.team.api_token, - "distinct_id": "other_id", - "$anon_distinct_id": "example_id", - }, - ) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # different hash, overridden by distinct_id, same variant assigned + response = self._post_decide( + api_version=2, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, 
+ assert_num_queries=13, + ) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # different hash, overridden by distinct_id, same variant assigned def test_feature_flags_v3_consistent_flags_with_numeric_distinct_ids(self, *args): self.team.app_urls = ["https://example.com"] @@ -1233,46 +1242,45 @@ def test_feature_flags_v3_consistent_flags_with_numeric_distinct_ids(self, *args ) # Should be enabled for everyone # caching flag definitions mean fewer queries - with self.assertNumQueries(5): - response = self._post_decide(api_version=2) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) + response = self._post_decide(api_version=2, assert_num_queries=5) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) - with self.assertNumQueries(13): - response = self._post_decide( - api_version=2, - data={ - "token": self.team.api_token, - "distinct_id": 12345, - "$anon_distinct_id": "example_id", - }, - ) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) + response = self._post_decide( + api_version=2, + data={ + "token": self.team.api_token, + "distinct_id": 12345, + "$anon_distinct_id": "example_id", + }, + assert_num_queries=13, + ) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) - with self.assertNumQueries(9): - response = self._post_decide( - api_version=2, - data={ - "token": self.team.api_token, - "distinct_id": "xyz", - "$anon_distinct_id": 12345, - }, - ) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) + response = self._post_decide( + api_version=2, + data={ + "token": self.team.api_token, + "distinct_id": "xyz", + "$anon_distinct_id": 12345, + }, + assert_num_queries=9, + ) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) - with self.assertNumQueries(9): - response = self._post_decide( - api_version=2, - data={ - "token": self.team.api_token, - "distinct_id": 5, - "$anon_distinct_id": 12345, - }, - ) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) + response = self._post_decide( + api_version=2, + data={ + "token": self.team.api_token, + "distinct_id": 5, + "$anon_distinct_id": 12345, + }, + assert_num_queries=9, + ) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) def test_feature_flags_v2_consistent_flags_with_ingestion_delays(self, *args): self.team.app_urls = ["https://example.com"] @@ -1327,31 +1335,30 @@ def test_feature_flags_v2_consistent_flags_with_ingestion_delays(self, *args): ) # caching flag definitions mean fewer queries - with self.assertNumQueries(4): - response = self._post_decide(api_version=2) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # assigned by 
distinct_id hash + response = self._post_decide(api_version=2, assert_num_queries=4) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # assigned by distinct_id hash # identify event is sent, but again, ingestion delays, so no entry in personDistinctID table # person.add_distinct_id("other_id") # in which case, we're pretty much trashed - with self.assertNumQueries(12): - response = self._post_decide( - api_version=2, - data={ - "token": self.team.api_token, - "distinct_id": "other_id", - "$anon_distinct_id": "example_id", - }, - ) - # self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "third-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # different hash, should've been overridden by distinct_id, but ingestion delays mean different variant assigned + response = self._post_decide( + api_version=2, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, + assert_num_queries=12, + ) + # self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "third-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # different hash, should've been overridden by distinct_id, but ingestion delays mean different variant assigned def test_feature_flags_v2_consistent_flags_with_merged_persons(self, *args): self.team.app_urls = ["https://example.com"] @@ -1408,13 +1415,12 @@ def test_feature_flags_v2_consistent_flags_with_merged_persons(self, *args): ) # caching flag definitions mean fewer queries - with self.assertNumQueries(5): - response = self._post_decide(api_version=2) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # assigned by distinct_id hash + response = self._post_decide(api_version=2, assert_num_queries=5) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # assigned by distinct_id hash # new person, created separately before "example_id" came into the picture. 
# on identify, this will trigger a merge with person.id being deleted, and @@ -1426,20 +1432,20 @@ def test_feature_flags_v2_consistent_flags_with_merged_persons(self, *args): ) # caching flag definitions in the above mean fewer queries - with self.assertNumQueries(13): - response = self._post_decide( - api_version=2, - data={ - "token": self.team.api_token, - "distinct_id": "other_id", - "$anon_distinct_id": "example_id", - }, - ) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # different hash, overridden by distinct_id, same variant assigned + response = self._post_decide( + api_version=2, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, + assert_num_queries=13, + ) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # different hash, overridden by distinct_id, same variant assigned # now let's say a merge happens with a call like: identify(distinct_id='example_id', anon_distinct_id='other_id') # that is, person2 is going to get merged into person. (Could've been vice versa, but the following code assumes this, it's symmetric.) @@ -1465,16 +1471,16 @@ def test_feature_flags_v2_consistent_flags_with_merged_persons(self, *args): person.add_distinct_id("other_id") # caching flag definitions in the above mean fewer queries - with self.assertNumQueries(5): - response = self._post_decide( - api_version=2, - data={"token": self.team.api_token, "distinct_id": "other_id"}, - ) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # different hash, overridden by distinct_id, same variant assigned + response = self._post_decide( + api_version=2, + data={"token": self.team.api_token, "distinct_id": "other_id"}, + assert_num_queries=5, + ) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # different hash, overridden by distinct_id, same variant assigned def test_feature_flags_v2_consistent_flags_with_delayed_new_identified_person(self, *args): self.team.app_urls = ["https://example.com"] @@ -1531,32 +1537,30 @@ def test_feature_flags_v2_consistent_flags_with_delayed_new_identified_person(se ) # caching flag definitions mean fewer queries - with self.assertNumQueries(5): - response = self._post_decide(api_version=2) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # assigned by distinct_id hash + response = self._post_decide(api_version=2, assert_num_queries=5) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # assigned by distinct_id hash # new person with "other_id" is yet to be 
created # caching flag definitions in the above mean fewer queries - with self.assertNumQueries(13): - # one extra query to find person_id for $anon_distinct_id - response = self._post_decide( - api_version=2, - data={ - "token": self.team.api_token, - "distinct_id": "other_id", - "$anon_distinct_id": "example_id", - }, - ) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # different hash, overridden by distinct_id, same variant assigned + response = self._post_decide( + api_version=2, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, + assert_num_queries=13, + ) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # different hash, overridden by distinct_id, same variant assigned # calling a simple decide call, while 'other_id' is still missing a person creation. # In this case, we are over our grace period for ingestion, and there's @@ -1564,31 +1568,29 @@ def test_feature_flags_v2_consistent_flags_with_delayed_new_identified_person(se # So, things appear like a completely new person with distinct-id = other_id. # And this person can't have any hash key overrides (since the person doesn't yet exist) # So one fewer query to not get overrides. - with self.assertNumQueries(4): - # caching flag definitions in the above mean fewer queries - - response = self._post_decide( - api_version=2, - data={"token": self.team.api_token, "distinct_id": "other_id"}, - ) - # self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual("third-variant", response.json()["featureFlags"]["multivariate-flag"]) # variant changed + response = self._post_decide( + api_version=2, + data={"token": self.team.api_token, "distinct_id": "other_id"}, + assert_num_queries=4, + ) + # self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual("third-variant", response.json()["featureFlags"]["multivariate-flag"]) # variant changed person.add_distinct_id("other_id") # Finally, 'other_id' is merged. 
The result goes back to its overridden values

         # caching flag definitions in the above mean fewer queries
-        with self.assertNumQueries(5):
-            response = self._post_decide(
-                api_version=2,
-                data={"token": self.team.api_token, "distinct_id": "other_id"},
-            )
-            self.assertTrue(response.json()["featureFlags"]["beta-feature"])
-            self.assertTrue(response.json()["featureFlags"]["default-flag"])
-            self.assertEqual(
-                "first-variant", response.json()["featureFlags"]["multivariate-flag"]
-            )  # different hash, overridden by distinct_id, same variant assigned
+        response = self._post_decide(
+            api_version=2,
+            data={"token": self.team.api_token, "distinct_id": "other_id"},
+            assert_num_queries=5,
+        )
+        self.assertTrue(response.json()["featureFlags"]["beta-feature"])
+        self.assertTrue(response.json()["featureFlags"]["default-flag"])
+        self.assertEqual(
+            "first-variant", response.json()["featureFlags"]["multivariate-flag"]
+        )  # different hash, overridden by distinct_id, same variant assigned

     def test_feature_flags_v2_complex(self, *args):
         self.team.app_urls = ["https://example.com"]
@@ -1651,30 +1653,26 @@ def test_feature_flags_v2_complex(self, *args):
         )

         # caching flag definitions mean fewer queries
-        with self.assertNumQueries(4):
-            response = self._post_decide(api_version=2, distinct_id="hosted_id")
-            self.assertIsNone(
-                (response.json()["featureFlags"]).get("multivariate-flag", None)
-            )  # User does not have realm == "cloud". Value is None.
-            self.assertTrue(
-                (response.json()["featureFlags"]).get("default-flag")
-            )  # User still receives the default flag
-
-        with self.assertNumQueries(4):
-            response = self._post_decide(api_version=2, distinct_id="example_id")
-            self.assertIsNotNone(
-                response.json()["featureFlags"]["multivariate-flag"]
-            )  # User has an 80% chance of being assigned any non-empty value.
-            self.assertEqual(
-                "second-variant", response.json()["featureFlags"]["multivariate-flag"]
-            )  # If the user falls in the rollout group, they have a 25% chance of being assigned any particular variant.
-            # Their overall probability is therefore 80% * 25% = 20%.
-            # To give another example, if n = 100 Cloud users and rollout_percentage = 80:
-            # None: 20 (100 * (100% - 80%))
-            # first-variant: 20 (100 * 80% * 25% = 20 users)
-            # second-variant: 20 (100 * 80% * 25% = 20 users)
-            # third-variant: 20 (100 * 80% * 25% = 20 users)
-            # fourth-variant: 20 (100 * 80% * 25% = 20 users)
+        response = self._post_decide(api_version=2, distinct_id="hosted_id", assert_num_queries=4)
+        self.assertIsNone(
+            (response.json()["featureFlags"]).get("multivariate-flag", None)
+        )  # User does not have realm == "cloud". Value is None.
+        self.assertTrue((response.json()["featureFlags"]).get("default-flag"))  # User still receives the default flag
+
+        response = self._post_decide(api_version=2, distinct_id="example_id", assert_num_queries=4)
+        self.assertIsNotNone(
+            response.json()["featureFlags"]["multivariate-flag"]
+        )  # User has an 80% chance of being assigned any non-empty value.
+        self.assertEqual(
+            "second-variant", response.json()["featureFlags"]["multivariate-flag"]
+        )  # If the user falls in the rollout group, they have a 25% chance of being assigned any particular variant.
+        # Their overall probability is therefore 80% * 25% = 20%.
+ # To give another example, if n = 100 Cloud users and rollout_percentage = 80: + # None: 20 (100 * (100% - 80%)) + # first-variant: 20 (100 * 80% * 25% = 20 users) + # second-variant: 20 (100 * 80% * 25% = 20 users) + # third-variant: 20 (100 * 80% * 25% = 20 users) + # fourth-variant: 20 (100 * 80% * 25% = 20 users) def test_feature_flags_v3(self, *args): self.team.app_urls = ["https://example.com"] @@ -1753,23 +1751,21 @@ def test_feature_flags_v3(self, *args): self._post_decide(api_version=3) client.logout() - with self.assertNumQueries(0): - response = self._post_decide(api_version=3) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # assigned by distinct_id hash - self.assertFalse(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide(api_version=3, assert_num_queries=0) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # assigned by distinct_id hash + self.assertFalse(response.json()["errorsWhileComputingFlags"]) - with self.assertNumQueries(0): - response = self._post_decide(api_version=3, distinct_id="other_id") - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "third-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # different hash, different variant assigned - self.assertFalse(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide(api_version=3, distinct_id="other_id", assert_num_queries=0) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "third-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # different hash, different variant assigned + self.assertFalse(response.json()["errorsWhileComputingFlags"]) @patch("posthog.models.feature_flag.flag_matching.FLAG_EVALUATION_ERROR_COUNTER") def test_feature_flags_v3_with_database_errors(self, mock_counter, *args): @@ -1864,24 +1860,22 @@ def test_feature_flags_v3_with_database_errors(self, mock_counter, *args): client.logout() - with self.assertNumQueries(4): - response = self._post_decide(api_version=3) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # assigned by distinct_id hash - self.assertFalse(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide(api_version=3, assert_num_queries=4) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # assigned by distinct_id hash + self.assertFalse(response.json()["errorsWhileComputingFlags"]) # now database is down - with connection.execute_wrapper(QueryTimeoutWrapper()): - response = self._post_decide(api_version=3, distinct_id="example_id") - self.assertTrue("beta-feature" not in response.json()["featureFlags"]) - 
self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual("first-variant", response.json()["featureFlags"]["multivariate-flag"]) - self.assertTrue(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide(api_version=3, distinct_id="example_id", simulate_database_timeout=True) + self.assertTrue("beta-feature" not in response.json()["featureFlags"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual("first-variant", response.json()["featureFlags"]["multivariate-flag"]) + self.assertTrue(response.json()["errorsWhileComputingFlags"]) - mock_counter.labels.assert_called_once_with(reason="timeout") + mock_counter.labels.assert_called_once_with(reason="timeout") @patch("posthog.models.feature_flag.flag_matching.FLAG_HASH_KEY_WRITES_COUNTER") @patch("posthog.api.decide.FLAG_EVALUATION_COUNTER") @@ -1998,15 +1992,14 @@ def test_feature_flags_v3_metric_counter(self, mock_error_counter, mock_counter, mock_counter.reset_mock() mock_hash_key_counter.reset_mock() - with self.assertNumQueries(9): - response = self._post_decide(api_version=3) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", - response.json()["featureFlags"]["multivariate-flag"], - ) # assigned by distinct_id hash - self.assertFalse(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide(api_version=3, assert_num_queries=9) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", + response.json()["featureFlags"]["multivariate-flag"], + ) # assigned by distinct_id hash + self.assertFalse(response.json()["errorsWhileComputingFlags"]) mock_counter.labels.assert_called_once_with( team_id=str(self.team.pk), @@ -2020,24 +2013,23 @@ def test_feature_flags_v3_metric_counter(self, mock_error_counter, mock_counter, mock_counter.reset_mock() # now database is down - with connection.execute_wrapper(QueryTimeoutWrapper()): - response = self._post_decide(api_version=3, distinct_id="example_id") - self.assertTrue("beta-feature" not in response.json()["featureFlags"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertTrue("multivariate-flag" not in response.json()["featureFlags"]) - self.assertTrue(response.json()["errorsWhileComputingFlags"]) - - mock_counter.labels.assert_called_once_with( - team_id=str(self.team.pk), - errors_computing=True, - has_hash_key_override=False, - ) - mock_counter.labels.return_value.inc.assert_called_once() - mock_error_counter.labels.assert_any_call(reason="healthcheck_failed") - mock_error_counter.labels.assert_any_call(reason="timeout") - self.assertEqual(mock_error_counter.labels.call_count, 2) + response = self._post_decide(api_version=3, distinct_id="example_id", simulate_database_timeout=True) + self.assertTrue("beta-feature" not in response.json()["featureFlags"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertTrue("multivariate-flag" not in response.json()["featureFlags"]) + self.assertTrue(response.json()["errorsWhileComputingFlags"]) - mock_hash_key_counter.labels.assert_not_called() + mock_counter.labels.assert_called_once_with( + team_id=str(self.team.pk), + errors_computing=True, + has_hash_key_override=False, + ) + mock_counter.labels.return_value.inc.assert_called_once() + 
mock_error_counter.labels.assert_any_call(reason="healthcheck_failed") + mock_error_counter.labels.assert_any_call(reason="timeout") + self.assertEqual(mock_error_counter.labels.call_count, 2) + + mock_hash_key_counter.labels.assert_not_called() def test_feature_flags_v3_with_database_errors_and_no_flags(self, *args): self.team.app_urls = ["https://example.com"] @@ -2053,16 +2045,13 @@ def test_feature_flags_v3_with_database_errors_and_no_flags(self, *args): # adding team to cache self._post_decide(api_version=3) - with self.assertNumQueries(0): - response = self._post_decide(api_version=3) - self.assertEqual(response.json()["featureFlags"], {}) - self.assertFalse(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide(api_version=3, assert_num_queries=0) + self.assertEqual(response.json()["featureFlags"], {}) + self.assertFalse(response.json()["errorsWhileComputingFlags"]) - # now database is down - with connection.execute_wrapper(QueryTimeoutWrapper()): - response = self._post_decide(api_version=3, distinct_id="example_id") - self.assertEqual(response.json()["featureFlags"], {}) - self.assertFalse(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide(api_version=3, distinct_id="example_id", simulate_database_timeout=True) + self.assertEqual(response.json()["featureFlags"], {}) + self.assertFalse(response.json()["errorsWhileComputingFlags"]) def test_feature_flags_v3_with_database_errors_and_geoip_properties(self, *args): self.team.app_urls = ["https://example.com"] @@ -2122,18 +2111,17 @@ def test_feature_flags_v3_with_database_errors_and_geoip_properties(self, *args) client.logout() - with self.assertNumQueries(0): - response = self._post_decide(api_version=3, ip=australia_ip) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertFalse(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide(api_version=3, ip=australia_ip, assert_num_queries=0) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertFalse(response.json()["errorsWhileComputingFlags"]) - # now database is down - with connection.execute_wrapper(QueryTimeoutWrapper()): - response = self._post_decide(api_version=3, distinct_id="example_id", ip=australia_ip) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertFalse(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide( + api_version=3, distinct_id="example_id", ip=australia_ip, simulate_database_timeout=True + ) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertFalse(response.json()["errorsWhileComputingFlags"]) def test_feature_flags_v3_consistent_flags_with_database_errors(self, *args): self.team.app_urls = ["https://example.com"] @@ -2191,38 +2179,37 @@ def test_feature_flags_v3_consistent_flags_with_database_errors(self, *args): # make sure caches are populated response = self._post_decide(api_version=3) - with self.assertNumQueries(5): - # effectively 3 queries, wrapped around by an atomic transaction - # E 1. SAVEPOINT "s4379526528_x103" - # E 2. SET LOCAL statement_timeout = 1000 - # E 3. 
SELECT "posthog_persondistinctid"."person_id", "posthog_persondistinctid"."distinct_id" FROM "posthog_persondistinctid" - # WHERE ("posthog_persondistinctid"."distinct_id" IN ('example_id') AND "posthog_persondistinctid"."team_id" = 1) - # E 4. SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key", "posthog_featureflaghashkeyoverride"."hash_key", "posthog_featureflaghashkeyoverride"."person_id" FROM "posthog_featureflaghashkeyoverride" - # WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (7) AND "posthog_featureflaghashkeyoverride"."team_id" = 1) - # E 5. RELEASE SAVEPOINT "s4379526528_x103" - response = self._post_decide(api_version=3) - self.assertTrue(response.json()["featureFlags"]["beta-feature"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] - ) # assigned by distinct_id hash + # effectively 3 queries, wrapped around by an atomic transaction + # E 1. SAVEPOINT "s4379526528_x103" + # E 2. SET LOCAL statement_timeout = 1000 + # E 3. SELECT "posthog_persondistinctid"."person_id", "posthog_persondistinctid"."distinct_id" FROM "posthog_persondistinctid" + # WHERE ("posthog_persondistinctid"."distinct_id" IN ('example_id') AND "posthog_persondistinctid"."team_id" = 1) + # E 4. SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key", "posthog_featureflaghashkeyoverride"."hash_key", "posthog_featureflaghashkeyoverride"."person_id" FROM "posthog_featureflaghashkeyoverride" + # WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (7) AND "posthog_featureflaghashkeyoverride"."team_id" = 1) + # E 5. RELEASE SAVEPOINT "s4379526528_x103" + response = self._post_decide(api_version=3, assert_num_queries=5) + self.assertTrue(response.json()["featureFlags"]["beta-feature"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertEqual( + "first-variant", response.json()["featureFlags"]["multivariate-flag"] + ) # assigned by distinct_id hash # new person, merged from old distinct ID person.add_distinct_id("other_id") # now database is down - with connection.execute_wrapper(QueryTimeoutWrapper()): - response = self._post_decide( - api_version=3, - data={ - "token": self.team.api_token, - "distinct_id": "other_id", - "$anon_distinct_id": "example_id", - }, - ) - self.assertTrue("beta-feature" not in response.json()["featureFlags"]) - self.assertTrue(response.json()["featureFlags"]["default-flag"]) - self.assertTrue(response.json()["errorsWhileComputingFlags"]) + response = self._post_decide( + api_version=3, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, + simulate_database_timeout=True, + ) + self.assertTrue("beta-feature" not in response.json()["featureFlags"]) + self.assertTrue(response.json()["featureFlags"]["default-flag"]) + self.assertTrue(response.json()["errorsWhileComputingFlags"]) def test_feature_flags_v2_with_groups(self, *args): # More in-depth tests in posthog/api/test/test_feature_flag.py @@ -2250,13 +2237,13 @@ def test_feature_flags_v2_with_groups(self, *args): ) # caching flag definitions mean fewer queries - with self.assertNumQueries(4): - response = self._post_decide(api_version=2, distinct_id="example_id") - self.assertEqual(response.json()["featureFlags"], {}) + response = self._post_decide(api_version=2, distinct_id="example_id", assert_num_queries=4) + self.assertEqual(response.json()["featureFlags"], {}) - with self.assertNumQueries(4): - response 
= self._post_decide(api_version=2, distinct_id="example_id", groups={"organization": "foo"}) - self.assertEqual(response.json()["featureFlags"], {"groups-flag": True}) + response = self._post_decide( + api_version=2, distinct_id="example_id", groups={"organization": "foo"}, assert_num_queries=4 + ) + self.assertEqual(response.json()["featureFlags"], {"groups-flag": True}) def test_feature_flags_with_personal_api_key(self, *args): key_value = generate_random_token_personal() @@ -2328,16 +2315,13 @@ def test_flag_with_regular_cohorts(self, *args): created_by=self.user, ) - with self.assertNumQueries(5): - response = self._post_decide(api_version=3, distinct_id="example_id_1") - self.assertEqual(response.json()["featureFlags"], {"cohort-flag": True}) - self.assertEqual(response.json()["errorsWhileComputingFlags"], False) + response = self._post_decide(api_version=3, distinct_id="example_id_1", assert_num_queries=5) + self.assertEqual(response.json()["featureFlags"], {"cohort-flag": True}) + self.assertEqual(response.json()["errorsWhileComputingFlags"], False) - with self.assertNumQueries(5): - # get cohort, get person filter - response = self._post_decide(api_version=3, distinct_id="another_id") - self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False}) - self.assertEqual(response.json()["errorsWhileComputingFlags"], False) + response = self._post_decide(api_version=3, distinct_id="another_id", assert_num_queries=5) + self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False}) + self.assertEqual(response.json()["errorsWhileComputingFlags"], False) def test_flag_with_invalid_cohort_filter_condition(self, *args): self.team.app_urls = ["https://example.com"] @@ -2406,10 +2390,9 @@ def test_flag_with_invalid_cohort_filter_condition(self, *args): created_by=self.user, ) - with self.assertNumQueries(5): - response = self._post_decide(api_version=3, distinct_id=person1_distinct_id) - self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False}) - self.assertEqual(response.json()["errorsWhileComputingFlags"], False) + response = self._post_decide(api_version=3, distinct_id=person1_distinct_id, assert_num_queries=5) + self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False}) + self.assertEqual(response.json()["errorsWhileComputingFlags"], False) def test_flag_with_invalid_but_safe_cohort_filter_condition(self, *args): self.team.app_urls = ["https://example.com"] @@ -2477,10 +2460,9 @@ def test_flag_with_invalid_but_safe_cohort_filter_condition(self, *args): created_by=self.user, ) - with self.assertNumQueries(5): - response = self._post_decide(api_version=3, distinct_id=person1_distinct_id) - self.assertEqual(response.json()["featureFlags"], {"cohort-flag": True}) - self.assertEqual(response.json()["errorsWhileComputingFlags"], False) + response = self._post_decide(api_version=3, distinct_id=person1_distinct_id, assert_num_queries=5) + self.assertEqual(response.json()["featureFlags"], {"cohort-flag": True}) + self.assertEqual(response.json()["errorsWhileComputingFlags"], False) def test_flag_with_unknown_cohort(self, *args): self.team.app_urls = ["https://example.com"] @@ -2508,10 +2490,9 @@ def test_flag_with_unknown_cohort(self, *args): created_by=self.user, ) - with self.assertNumQueries(6): - response = self._post_decide(api_version=3, distinct_id="example_id_1") - self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False, "simple-flag": True}) - self.assertEqual(response.json()["errorsWhileComputingFlags"], False) + response 
= self._post_decide(api_version=3, distinct_id="example_id_1", assert_num_queries=6) + self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False, "simple-flag": True}) + self.assertEqual(response.json()["errorsWhileComputingFlags"], False) def test_flag_with_multiple_complex_unknown_cohort(self, *args): self.team.app_urls = ["https://example.com"] @@ -2655,24 +2636,23 @@ def test_flag_with_multiple_complex_unknown_cohort(self, *args): created_by=self.user, ) - with self.assertNumQueries(8): - # Each invalid cohort is queried only once - # 1. Select all valid cohorts - # 2. Select 99999 cohort - # 3. Select deleted cohort - # 4. Select cohort from other team - response = self._post_decide(api_version=3, distinct_id="example_id_1") - self.assertEqual( - response.json()["featureFlags"], - { - "cohort-flag": False, - "simple-flag": True, - "cohort-flag-2": False, - "cohort-flag-3": False, - "cohort-flag-4": True, - }, - ) - self.assertEqual(response.json()["errorsWhileComputingFlags"], False) + # Each invalid cohort is queried only once + # 1. Select all valid cohorts + # 2. Select 99999 cohort + # 3. Select deleted cohort + # 4. Select cohort from other team + response = self._post_decide(api_version=3, distinct_id="example_id_1", assert_num_queries=8) + self.assertEqual( + response.json()["featureFlags"], + { + "cohort-flag": False, + "simple-flag": True, + "cohort-flag-2": False, + "cohort-flag-3": False, + "cohort-flag-4": True, + }, + ) + self.assertEqual(response.json()["errorsWhileComputingFlags"], False) @snapshot_postgres_queries def test_flag_with_behavioural_cohorts(self, *args): @@ -2711,15 +2691,13 @@ def test_flag_with_behavioural_cohorts(self, *args): created_by=self.user, ) - with self.assertNumQueries(6): - response = self._post_decide(api_version=3, distinct_id="example_id_1") - self.assertEqual(response.json()["featureFlags"], {}) - self.assertEqual(response.json()["errorsWhileComputingFlags"], True) + response = self._post_decide(api_version=3, distinct_id="example_id_1", assert_num_queries=6) + self.assertEqual(response.json()["featureFlags"], {}) + self.assertEqual(response.json()["errorsWhileComputingFlags"], True) - with self.assertNumQueries(6): - response = self._post_decide(api_version=3, distinct_id="another_id") - self.assertEqual(response.json()["featureFlags"], {}) - self.assertEqual(response.json()["errorsWhileComputingFlags"], True) + response = self._post_decide(api_version=3, distinct_id="another_id", assert_num_queries=6) + self.assertEqual(response.json()["featureFlags"], {}) + self.assertEqual(response.json()["errorsWhileComputingFlags"], True) def test_personal_api_key_without_project_id(self, *args): key_value = generate_random_token_personal() @@ -2875,20 +2853,21 @@ def test_geoip_disable(self, *args): }, ) - with self.assertNumQueries(4): - geoip_not_disabled_res = self._post_decide(api_version=3, ip=australia_ip, geoip_disable=False) - geoip_disabled_res = self._post_decide(api_version=3, ip=australia_ip, geoip_disable=True) + geoip_not_disabled_res = self._post_decide( + api_version=3, ip=australia_ip, geoip_disable=False, assert_num_queries=0 + ) + geoip_disabled_res = self._post_decide(api_version=3, ip=australia_ip, geoip_disable=True, assert_num_queries=4) - # person has geoip_country_name set to India, but australia-feature is true, because geoip resolution of current IP is enabled - self.assertEqual( - geoip_not_disabled_res.json()["featureFlags"], - {"australia-feature": True, "india-feature": False}, - ) - # person has 
geoip_country_name set to India, and australia-feature is false, because geoip resolution of current IP is disabled - self.assertEqual( - geoip_disabled_res.json()["featureFlags"], - {"australia-feature": False, "india-feature": True}, - ) + # person has geoip_country_name set to India, but australia-feature is true, because geoip resolution of current IP is enabled + self.assertEqual( + geoip_not_disabled_res.json()["featureFlags"], + {"australia-feature": True, "india-feature": False}, + ) + # person has geoip_country_name set to India, and australia-feature is false, because geoip resolution of current IP is disabled + self.assertEqual( + geoip_disabled_res.json()["featureFlags"], + {"australia-feature": False, "india-feature": True}, + ) # test for falsy/truthy values geoip_not_disabled_res = self._post_decide(api_version=3, ip=australia_ip, geoip_disable="0") @@ -2962,9 +2941,8 @@ def test_disable_flags(self, *args): }, ) - with self.assertNumQueries(0): - flag_disabled_res = self._post_decide(api_version=3, ip=australia_ip, disable_flags=True) - self.assertEqual(flag_disabled_res.json()["featureFlags"], {}) + flag_disabled_res = self._post_decide(api_version=3, ip=australia_ip, disable_flags=True, assert_num_queries=0) + self.assertEqual(flag_disabled_res.json()["featureFlags"], {}) # test for falsy/truthy values flags_not_disabled_res = self._post_decide(api_version=3, ip=australia_ip, disable_flags="0") @@ -3014,30 +2992,30 @@ def test_decide_doesnt_error_out_when_database_is_down(self, *args): {"endpoint": "/e/"}, ) - # now database is down - with connection.execute_wrapper(QueryTimeoutWrapper()): - response = self._post_decide(api_version=2, origin="https://random.example.com").json() + response = self._post_decide( + api_version=2, origin="https://random.example.com", simulate_database_timeout=True + ).json() - self.assertEqual( - response["sessionRecording"], - make_session_recording_decide_response( - { - "sampleRate": "0.20", - } - ), - ) + self.assertEqual( + response["sessionRecording"], + make_session_recording_decide_response( + { + "sampleRate": "0.20", + } + ), + ) - self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) - self.assertEqual(response["siteApps"], []) - self.assertEqual( - response["capturePerformance"], - {"network_timing": True, "web_vitals": False, "web_vitals_allowed_metrics": None}, - ) - self.assertEqual( - response["autocaptureExceptions"], - {"endpoint": "/e/"}, - ) - self.assertEqual(response["featureFlags"], {}) + self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) + self.assertEqual(response["siteApps"], []) + self.assertEqual( + response["capturePerformance"], + {"network_timing": True, "web_vitals": False, "web_vitals_allowed_metrics": None}, + ) + self.assertEqual( + response["autocaptureExceptions"], + {"endpoint": "/e/"}, + ) + self.assertEqual(response["featureFlags"], {}) def test_decide_with_json_and_numeric_distinct_ids(self, *args): self.client.logout() @@ -3070,44 +3048,41 @@ def test_decide_with_json_and_numeric_distinct_ids(self, *args): self._post_decide(api_version=2, distinct_id="a") # caching flag definitions mean fewer queries - with self.assertNumQueries(4): - response = self._post_decide(api_version=2, distinct_id=12345) - self.assertEqual(response.json()["featureFlags"], {"random-flag": True}) + response = self._post_decide(api_version=2, distinct_id=12345, assert_num_queries=4) + self.assertEqual(response.json()["featureFlags"], {"random-flag": True}) - with self.assertNumQueries(4): - 
response = self._post_decide( - api_version=2, - distinct_id={ - "id": 33040, - "shopify_domain": "xxx.myshopify.com", - "shopify_token": "shpat_xxxx", - "created_at": "2023-04-17T08:55:34.624Z", - "updated_at": "2023-04-21T08:43:34.479", - }, - ) - self.assertEqual( - response.json()["featureFlags"], - {"random-flag": True, "filer-by-property": True}, - ) + response = self._post_decide( + api_version=2, + distinct_id={ + "id": 33040, + "shopify_domain": "xxx.myshopify.com", + "shopify_token": "shpat_xxxx", + "created_at": "2023-04-17T08:55:34.624Z", + "updated_at": "2023-04-21T08:43:34.479", + }, + assert_num_queries=4, + ) + self.assertEqual( + response.json()["featureFlags"], + {"random-flag": True, "filer-by-property": True}, + ) - with self.assertNumQueries(4): - response = self._post_decide( - api_version=2, - distinct_id="{'id': 33040, 'shopify_domain': 'xxx.myshopify.com', 'shopify_token': 'shpat_xxxx', 'created_at': '2023-04-17T08:55:34.624Z', 'updated_at': '2023-04-21T08:43:34.479'", - ) - self.assertEqual(response.json()["featureFlags"], {"random-flag": True}) + response = self._post_decide( + api_version=2, + distinct_id="{'id': 33040, 'shopify_domain': 'xxx.myshopify.com', 'shopify_token': 'shpat_xxxx', 'created_at': '2023-04-17T08:55:34.624Z', 'updated_at': '2023-04-21T08:43:34.479'", + assert_num_queries=4, + ) + self.assertEqual(response.json()["featureFlags"], {"random-flag": True}) - with self.assertNumQueries(4): - response = self._post_decide(api_version=2, distinct_id={"x": "y"}) - self.assertEqual( - response.json()["featureFlags"], - {"random-flag": True, "filer-by-property": True}, - ) + response = self._post_decide(api_version=2, distinct_id={"x": "y"}, assert_num_queries=4) + self.assertEqual( + response.json()["featureFlags"], + {"random-flag": True, "filer-by-property": True}, + ) - with self.assertNumQueries(4): - response = self._post_decide(api_version=2, distinct_id={"x": "z"}) - self.assertEqual(response.json()["featureFlags"], {"random-flag": True}) - # need to pass in exact string to get the property flag + response = self._post_decide(api_version=2, distinct_id={"x": "z"}, assert_num_queries=4) + self.assertEqual(response.json()["featureFlags"], {"random-flag": True}) + # need to pass in exact string to get the property flag def test_rate_limits(self, *args): with self.settings( @@ -3606,16 +3581,48 @@ def test_decide_element_chain_as_string(self, *args): def test_decide_default_identified_only(self, *args): self.client.logout() - with self.settings(DEFAULT_IDENTIFIED_ONLY_TEAM_ID_MIN=str(1000000)): - response = self._post_decide(api_version=3) - self.assertEqual(response.status_code, 200) - self.assertTrue("defaultIdentifiedOnly" in response.json()) - self.assertFalse(response.json()["defaultIdentifiedOnly"]) - team_id = self.team.id - with self.settings(DEFAULT_IDENTIFIED_ONLY_TEAM_ID_MIN=str(team_id)): + response = self._post_decide(api_version=3) + self.assertEqual(response.status_code, 200) + self.assertTrue("defaultIdentifiedOnly" in response.json()) + self.assertTrue(response.json()["defaultIdentifiedOnly"]) + + +class TestDecideRemoteConfig(TestDecide): + use_remote_config = True + + def test_definitely_loads_via_remote_config(self, *args): + # NOTE: This is a sanity check test that we aren't just using the old decide logic + + with patch.object( + RemoteConfig, "get_config_via_token", wraps=RemoteConfig.get_config_via_token + ) as wrapped_get_config_via_token: response = self._post_decide(api_version=3) - self.assertEqual(response.status_code, 
200) - self.assertTrue(response.json()["defaultIdentifiedOnly"]) + wrapped_get_config_via_token.assert_called_once() + + # NOTE: If this changes it indicates something is wrong as we should keep this exact format + # for backwards compatibility + assert response.json() == snapshot( + { + "supportedCompression": ["gzip", "gzip-js"], + "captureDeadClicks": False, + "capturePerformance": {"network_timing": True, "web_vitals": False, "web_vitals_allowed_metrics": None}, + "autocapture_opt_out": False, + "autocaptureExceptions": False, + "analytics": {"endpoint": "/i/v0/e/"}, + "elementsChainAsString": True, + "sessionRecording": False, + "heatmaps": False, + "surveys": False, + "defaultIdentifiedOnly": True, + "siteApps": [], + "isAuthenticated": False, + "toolbarParams": {}, + "config": {"enable_collect_everything": True}, + "featureFlags": {}, + "errorsWhileComputingFlags": False, + "featureFlagPayloads": {}, + } + ) class TestDatabaseCheckForDecide(BaseTest, QueryMatchingTest): diff --git a/posthog/api/test/test_early_access_feature.py b/posthog/api/test/test_early_access_feature.py index 311fbae3cb1d3e..525490c4f949d1 100644 --- a/posthog/api/test/test_early_access_feature.py +++ b/posthog/api/test/test_early_access_feature.py @@ -1,3 +1,4 @@ +import json from unittest.mock import ANY from rest_framework import status @@ -7,6 +8,7 @@ from posthog.models.early_access_feature import EarlyAccessFeature from posthog.models import FeatureFlag, Person +from posthog.models.team.team_caching import set_team_in_cache from posthog.test.base import ( APIBaseTest, BaseTest, @@ -631,6 +633,109 @@ def test_early_access_features(self): ], ) + @snapshot_postgres_queries + def test_early_access_features_with_pre_env_cached_team(self): + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "example@posthog.com"}, + ) + + # This is precisely what the `set_team_in_cache()` would have set on Dec 9, 2024 + cache.set( + f"team_token:{self.team.api_token}", + json.dumps( + { + # Important: this serialization doesn't have `project_id`! 
It wasn't always part of CachingTeamSerializer + "id": self.team.id, + "uuid": str(self.team.uuid), + "name": self.team.name, + "api_token": self.team.api_token, + } + ), + ) + feature_flag = FeatureFlag.objects.create( + team=self.team, + name=f"Feature Flag for Feature Sprocket", + key="sprocket", + rollout_percentage=0, + created_by=self.user, + ) + feature = EarlyAccessFeature.objects.create( + team=self.team, + name="Sprocket", + description="A fancy new sprocket.", + stage="beta", + feature_flag=feature_flag, + ) + + self.client.logout() + + with self.assertNumQueries(1): + response = self._get_features() + self.assertEqual(response.status_code, 200) + self.assertEqual(response.get("access-control-allow-origin"), "http://127.0.0.1:8000") + + self.assertListEqual( + response.json()["earlyAccessFeatures"], + [ + { + "id": str(feature.id), + "name": "Sprocket", + "description": "A fancy new sprocket.", + "stage": "beta", + "documentationUrl": "", + "flagKey": "sprocket", + } + ], + ) + + @snapshot_postgres_queries + def test_early_access_features_with_cached_team(self): + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "example@posthog.com"}, + ) + + # Slightly dirty to use the actual implementation of `set_team_in_cache()` here, but this tests how things are + set_team_in_cache(self.team.api_token) + feature_flag = FeatureFlag.objects.create( + team=self.team, + name=f"Feature Flag for Feature Sprocket", + key="sprocket", + rollout_percentage=0, + created_by=self.user, + ) + feature = EarlyAccessFeature.objects.create( + team=self.team, + name="Sprocket", + description="A fancy new sprocket.", + stage="beta", + feature_flag=feature_flag, + ) + + self.client.logout() + + with self.assertNumQueries(1): + response = self._get_features() + self.assertEqual(response.status_code, 200) + self.assertEqual(response.get("access-control-allow-origin"), "http://127.0.0.1:8000") + + self.assertListEqual( + response.json()["earlyAccessFeatures"], + [ + { + "id": str(feature.id), + "name": "Sprocket", + "description": "A fancy new sprocket.", + "stage": "beta", + "documentationUrl": "", + "flagKey": "sprocket", + } + ], + ) + def test_early_access_features_beta_only(self): Person.objects.create( team=self.team, diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index 86280d76fd5fa9..efc67f79e3f40d 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -1463,73 +1463,91 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): self.assertEqual(len(tiles), 2) self.assertEqual(tiles[0].insight.name, "Feature Flag Called Total Volume") self.assertEqual( - tiles[0].insight.filters, + tiles[0].insight.query, { - "events": [ - { - "id": "$feature_flag_called", - "name": "$feature_flag_called", - "type": "events", - } - ], - "display": "ActionsLineGraph", - "insight": "TRENDS", - "interval": "day", - "breakdown": "$feature_flag_response", - "date_from": "-30d", - "properties": { - "type": "AND", - "values": [ - { - "type": "AND", - "values": [ - { - "key": "$feature_flag", - "type": "event", - "value": "alpha-feature", - } - ], - } - ], + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [{"kind": "EventsNode", "name": "$feature_flag_called", "event": "$feature_flag_called"}], + "interval": "day", + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + { 
+ "key": "$feature_flag", + "type": "event", + "value": "alpha-feature", + "operator": "exact", + } + ], + } + ], + }, + "trendsFilter": { + "display": "ActionsLineGraph", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown": "$feature_flag_response", "breakdown_type": "event"}, + "filterTestAccounts": False, }, - "breakdown_type": "event", - "filter_test_accounts": False, }, ) self.assertEqual(tiles[1].insight.name, "Feature Flag calls made by unique users per variant") self.assertEqual( - tiles[1].insight.filters, + tiles[1].insight.query, { - "events": [ - { - "id": "$feature_flag_called", - "math": "dau", - "name": "$feature_flag_called", - "type": "events", - } - ], - "display": "ActionsTable", - "insight": "TRENDS", - "interval": "day", - "breakdown": "$feature_flag_response", - "date_from": "-30d", - "properties": { - "type": "AND", - "values": [ + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [ { - "type": "AND", - "values": [ - { - "key": "$feature_flag", - "type": "event", - "value": "alpha-feature", - } - ], + "kind": "EventsNode", + "math": "dau", + "name": "$feature_flag_called", + "event": "$feature_flag_called", } ], + "interval": "day", + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + { + "key": "$feature_flag", + "type": "event", + "value": "alpha-feature", + "operator": "exact", + } + ], + } + ], + }, + "trendsFilter": { + "display": "ActionsTable", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown": "$feature_flag_response", "breakdown_type": "event"}, + "filterTestAccounts": False, }, - "breakdown_type": "event", - "filter_test_accounts": False, }, ) @@ -1557,153 +1575,191 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): self.assertEqual(len(tiles), 4) self.assertEqual(tiles[0].insight.name, "Feature Flag Called Total Volume") self.assertEqual( - tiles[0].insight.filters, + tiles[0].insight.query, { - "events": [ - { - "id": "$feature_flag_called", - "name": "$feature_flag_called", - "type": "events", - } - ], - "display": "ActionsLineGraph", - "insight": "TRENDS", - "interval": "day", - "breakdown": "$feature_flag_response", - "date_from": "-30d", - "properties": { - "type": "AND", - "values": [ - { - "type": "AND", - "values": [ - { - "key": "$feature_flag", - "type": "event", - "value": "alpha-feature", - } - ], - } - ], + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [{"kind": "EventsNode", "name": "$feature_flag_called", "event": "$feature_flag_called"}], + "interval": "day", + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + { + "key": "$feature_flag", + "type": "event", + "value": "alpha-feature", + "operator": "exact", + } + ], + } + ], + }, + "trendsFilter": { + "display": "ActionsLineGraph", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + 
"showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown": "$feature_flag_response", "breakdown_type": "event"}, + "filterTestAccounts": False, }, - "breakdown_type": "event", - "filter_test_accounts": False, }, ) self.assertEqual(tiles[1].insight.name, "Feature Flag calls made by unique users per variant") self.assertEqual( - tiles[1].insight.filters, + tiles[1].insight.query, { - "events": [ - { - "id": "$feature_flag_called", - "math": "dau", - "name": "$feature_flag_called", - "type": "events", - } - ], - "display": "ActionsTable", - "insight": "TRENDS", - "interval": "day", - "breakdown": "$feature_flag_response", - "date_from": "-30d", - "properties": { - "type": "AND", - "values": [ + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [ { - "type": "AND", - "values": [ - { - "key": "$feature_flag", - "type": "event", - "value": "alpha-feature", - } - ], + "kind": "EventsNode", + "math": "dau", + "name": "$feature_flag_called", + "event": "$feature_flag_called", } ], + "interval": "day", + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + { + "key": "$feature_flag", + "type": "event", + "value": "alpha-feature", + "operator": "exact", + } + ], + } + ], + }, + "trendsFilter": { + "display": "ActionsTable", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown": "$feature_flag_response", "breakdown_type": "event"}, + "filterTestAccounts": False, }, - "breakdown_type": "event", - "filter_test_accounts": False, }, ) # enriched insights self.assertEqual(tiles[2].insight.name, "Feature Interaction Total Volume") self.assertEqual( - tiles[2].insight.filters, + tiles[2].insight.query, { - "events": [ - { - "id": "$feature_interaction", - "name": "Feature Interaction - Total", - "type": "events", - }, - { - "id": "$feature_interaction", - "math": "dau", - "name": "Feature Interaction - Unique users", - "type": "events", - }, - ], - "display": "ActionsLineGraph", - "insight": "TRENDS", - "interval": "day", - "date_from": "-30d", - "properties": { - "type": "AND", - "values": [ + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [ + {"kind": "EventsNode", "name": "Feature Interaction - Total", "event": "$feature_interaction"}, { - "type": "AND", - "values": [ - { - "key": "feature_flag", - "type": "event", - "value": "alpha-feature", - } - ], - } + "kind": "EventsNode", + "math": "dau", + "name": "Feature Interaction - Unique users", + "event": "$feature_interaction", + }, ], + "interval": "day", + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + { + "key": "feature_flag", + "type": "event", + "value": "alpha-feature", + "operator": "exact", + } + ], + } + ], + }, + "trendsFilter": { + "display": "ActionsLineGraph", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown_type": "event"}, + "filterTestAccounts": False, }, - "filter_test_accounts": False, }, ) self.assertEqual(tiles[3].insight.name, "Feature Viewed Total Volume") self.assertEqual( 
- tiles[3].insight.filters, + tiles[3].insight.query, { - "events": [ - { - "id": "$feature_view", - "name": "Feature View - Total", - "type": "events", - }, - { - "id": "$feature_view", - "math": "dau", - "name": "Feature View - Unique users", - "type": "events", - }, - ], - "display": "ActionsLineGraph", - "insight": "TRENDS", - "interval": "day", - "date_from": "-30d", - "properties": { - "type": "AND", - "values": [ + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [ + {"kind": "EventsNode", "name": "Feature View - Total", "event": "$feature_view"}, { - "type": "AND", - "values": [ - { - "key": "feature_flag", - "type": "event", - "value": "alpha-feature", - } - ], - } + "kind": "EventsNode", + "math": "dau", + "name": "Feature View - Unique users", + "event": "$feature_view", + }, ], + "interval": "day", + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + { + "key": "feature_flag", + "type": "event", + "value": "alpha-feature", + "operator": "exact", + } + ], + } + ], + }, + "trendsFilter": { + "display": "ActionsLineGraph", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown_type": "event"}, + "filterTestAccounts": False, }, - "filter_test_accounts": False, }, ) @@ -6287,6 +6343,30 @@ def test_flag_status_reasons(self): FeatureFlagStatus.ACTIVE, ) + # Request status for multivariate flag with a variant set to 100% but no release condition set to 100% + multivariate_flag_rolled_out_release_condition_half_variant = FeatureFlag.objects.create( + name="Multivariate flag with release condition set to 100%, but variants still 50%", + key="multivariate-rolled-out-release-half-variant-flag", + team=self.team, + active=True, + filters={ + "multivariate": { + "variants": [ + {"key": "var1key", "name": "test", "rollout_percentage": 50}, + {"key": "var2key", "name": "control", "rollout_percentage": 50}, + ], + }, + "groups": [ + {"variant": None, "properties": [], "rollout_percentage": 100}, + ], + }, + ) + self.create_feature_flag_called_event(multivariate_flag_rolled_out_release_condition_half_variant.key) + self.assert_expected_response( + multivariate_flag_rolled_out_release_condition_half_variant.id, + FeatureFlagStatus.ACTIVE, + ) + # Request status for multivariate flag with variants set to 100% and a filtered release condition multivariate_flag_rolled_out_variant_rolled_out_filtered_release = FeatureFlag.objects.create( name="Multivariate flag with variant and release condition set to 100%", diff --git a/posthog/api/test/test_hog_function.py b/posthog/api/test/test_hog_function.py index 55788bb058191f..b988b53fdbbfb5 100644 --- a/posthog/api/test/test_hog_function.py +++ b/posthog/api/test/test_hog_function.py @@ -20,6 +20,7 @@ "name": "HogHook", "hog": "fetch(inputs.url, {\n 'headers': inputs.headers,\n 'body': inputs.payload,\n 'method': inputs.method\n});", "type": "destination", + "enabled": True, "inputs_schema": [ {"key": "url", "type": "string", "label": "Webhook URL", "required": True}, {"key": "payload", "type": "json", "label": "JSON Payload", "required": True}, @@ -74,6 +75,7 @@ def _create_slack_function(self, data: Optional[dict] = None): payload = { "name": "Slack", "template_id": template_slack.id, + "type": "destination", "inputs": { 
"slack_workspace": {"value": 1}, "channel": {"value": "#general"}, @@ -196,7 +198,13 @@ def _filter_expected_keys(self, actual_data, expected_structure): def test_create_hog_function(self, *args): response = self.client.post( f"/api/projects/{self.team.id}/hog_functions/", - data={"name": "Fetch URL", "description": "Test description", "hog": "fetch(inputs.url);", "inputs": {}}, + data={ + "type": "destination", + "name": "Fetch URL", + "description": "Test description", + "hog": "fetch(inputs.url);", + "inputs": {}, + }, ) assert response.status_code == status.HTTP_201_CREATED, response.json() assert response.json()["created_by"]["id"] == self.user.id @@ -218,6 +226,7 @@ def test_create_hog_function(self, *args): "icon_url": None, "template": None, "masking": None, + "mappings": None, "status": {"rating": 0, "state": 0, "tokens": 0}, } @@ -256,6 +265,7 @@ def test_creates_with_template_id(self, *args): "description": "Test description", "hog": "fetch(inputs.url);", "template_id": template_webhook.id, + "type": "destination", }, ) assert response.status_code == status.HTTP_201_CREATED, response.json() @@ -271,6 +281,8 @@ def test_creates_with_template_id(self, *args): "hog": template_webhook.hog, "filters": None, "masking": None, + "mappings": None, + "mapping_templates": None, "sub_templates": response.json()["template"]["sub_templates"], } @@ -362,6 +374,7 @@ def test_inputs_required(self, *args): "inputs_schema": [ {"key": "url", "type": "string", "label": "Webhook URL", "required": True}, ], + "type": "destination", } # Check required res = self.client.post(f"/api/projects/{self.team.id}/hog_functions/", data={**payload}) @@ -382,6 +395,7 @@ def test_inputs_mismatch_type(self, *args): {"key": "dictionary", "type": "dictionary"}, {"key": "boolean", "type": "boolean"}, ], + "type": "destination", } bad_inputs = { @@ -414,6 +428,7 @@ def test_secret_inputs_not_returned(self, *args): "value": "I AM SECRET", }, }, + "type": "destination", } expectation = { "url": { @@ -453,6 +468,7 @@ def test_secret_inputs_not_returned(self, *args): def test_secret_inputs_not_updated_if_not_changed(self, *args): payload = { + "type": "destination", "name": "Fetch URL", "hog": "fetch(inputs.url);", "inputs_schema": [ @@ -489,6 +505,7 @@ def test_secret_inputs_not_updated_if_not_changed(self, *args): def test_secret_inputs_updated_if_changed(self, *args): payload = { + "type": "destination", "name": "Fetch URL", "hog": "fetch(inputs.url);", "inputs_schema": [ @@ -580,6 +597,7 @@ def test_generates_hog_bytecode(self, *args): response = self.client.post( f"/api/projects/{self.team.id}/hog_functions/", data={ + "type": "destination", "name": "Fetch URL", "hog": "let i := 0;\nwhile(i < 3) {\n i := i + 1;\n fetch(inputs.url, {\n 'headers': {\n 'x-count': f'{i}'\n },\n 'body': inputs.payload,\n 'method': inputs.method\n });\n}", }, @@ -790,13 +808,7 @@ def test_loads_status_when_enabled_and_available(self, *args): response = self.client.post( f"/api/projects/{self.team.id}/hog_functions/", - data={ - "name": "Fetch URL", - "description": "Test description", - "hog": "fetch(inputs.url);", - "template_id": template_webhook.id, - "enabled": True, - }, + data=EXAMPLE_FULL, ) assert response.status_code == status.HTTP_201_CREATED, response.json() @@ -810,13 +822,7 @@ def test_does_not_crash_when_status_not_available(self, *args): response = self.client.post( f"/api/projects/{self.team.id}/hog_functions/", - data={ - "name": "Fetch URL", - "description": "Test description", - "hog": "fetch(inputs.url);", - 
"template_id": template_webhook.id, - "enabled": True, - }, + data=EXAMPLE_FULL, ) assert response.status_code == status.HTTP_201_CREATED, response.json() response = self.client.get(f"/api/projects/{self.team.id}/hog_functions/{response.json()['id']}") @@ -830,7 +836,7 @@ def test_patches_status_on_enabled_update(self, *args): response = self.client.post( f"/api/projects/{self.team.id}/hog_functions/", - data={"name": "Fetch URL", "hog": "fetch(inputs.url);", "enabled": True}, + data={"type": "destination", "name": "Fetch URL", "hog": "fetch(inputs.url);", "enabled": True}, ) id = response.json()["id"] @@ -1055,6 +1061,30 @@ def test_create_hog_function_with_site_destination_type(self): assert response.json()["bytecode"] is None assert "Hello, site_destination" in response.json()["transpiled"] + def test_cannot_modify_type_of_existing_hog_function(self): + response = self.client.post( + f"/api/projects/{self.team.id}/hog_functions/", + data={ + "name": "Site Destination Function", + "hog": "export function onLoad() { console.log('Hello, site_destination'); }", + "type": "site_destination", + }, + ) + + assert response.status_code == status.HTTP_201_CREATED, response.json() + + response = self.client.patch( + f"/api/projects/{self.team.id}/hog_functions/{response.json()['id']}/", + data={"type": "site_app"}, + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST, response.json() + assert response.json() == { + "attr": "type", + "detail": "Cannot modify the type of an existing function", + "code": "invalid_input", + "type": "validation_error", + } + def test_transpiled_field_not_populated_for_other_types(self): response = self.client.post( f"/api/projects/{self.team.id}/hog_functions/", diff --git a/posthog/api/test/test_hog_function_templates.py b/posthog/api/test/test_hog_function_templates.py index 956be4de638a9b..7a9b5150f5acdb 100644 --- a/posthog/api/test/test_hog_function_templates.py +++ b/posthog/api/test/test_hog_function_templates.py @@ -17,6 +17,8 @@ "category": template.category, "filters": template.filters, "masking": template.masking, + "mappings": template.mappings, + "mapping_templates": template.mapping_templates, "icon_url": template.icon_url, } diff --git a/posthog/api/test/test_remote_config.py b/posthog/api/test/test_remote_config.py index e866583faed1bc..c5fb3a53a11739 100644 --- a/posthog/api/test/test_remote_config.py +++ b/posthog/api/test/test_remote_config.py @@ -3,13 +3,20 @@ from rest_framework import status from django.core.cache import cache -from posthog.test.base import APIBaseTest, QueryMatchingTest +from posthog.test.base import APIBaseTest, FuzzyInt, QueryMatchingTest + +# The remote config stuff plus plugin and hog function queries +CONFIG_REFRESH_QUERY_COUNT = 5 class TestRemoteConfig(APIBaseTest, QueryMatchingTest): def setUp(self): self.client.logout() + self.team.recording_domains = ["https://*.example.com"] + self.team.session_recording_opt_in = True + self.team.save() + cache.clear() def test_missing_tokens(self): @@ -30,11 +37,12 @@ def test_invalid_tokens(self): assert response.status_code == status.HTTP_404_NOT_FOUND def test_valid_config(self): - with self.assertNumQueries(3): - response = self.client.get(f"/array/{self.team.api_token}/config") + # Not sure why but there is sometimes one extra query here + with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): + response = self.client.get(f"/array/{self.team.api_token}/config", HTTP_ORIGIN="https://foo.example.com") with self.assertNumQueries(0): - response = 
self.client.get(f"/array/{self.team.api_token}/config") + response = self.client.get(f"/array/{self.team.api_token}/config", HTTP_ORIGIN="https://foo.example.com") assert response.status_code == status.HTTP_200_OK assert response.headers["Content-Type"] == "application/json" @@ -49,49 +57,85 @@ def test_valid_config(self): "autocaptureExceptions": False, "analytics": {"endpoint": "/i/v0/e/"}, "elementsChainAsString": True, - "sessionRecording": False, + "sessionRecording": { + "endpoint": "/s/", + "consoleLogRecordingEnabled": True, + "recorderVersion": "v2", + "sampleRate": None, + "minimumDurationMilliseconds": None, + "linkedFlag": None, + "networkPayloadCapture": None, + "urlTriggers": [], + "urlBlocklist": [], + "eventTriggers": [], + "scriptConfig": None, + }, "surveys": [], "heatmaps": False, - "defaultIdentifiedOnly": False, + "defaultIdentifiedOnly": True, "siteApps": [], } ) + def test_vary_header_response(self): + response = self.client.get(f"/array/{self.team.api_token}/config", HTTP_ORIGIN="https://foo.example.com") + assert response.status_code == status.HTTP_200_OK, response.json() + assert "Origin" in response.headers["Vary"] + assert "Referer" in response.headers["Vary"] + + def test_different_response_for_other_domains(self): + # Not sure why but there is sometimes one extra query here + with self.assertNumQueries(FuzzyInt(CONFIG_REFRESH_QUERY_COUNT, CONFIG_REFRESH_QUERY_COUNT + 1)): + response = self.client.get(f"/array/{self.team.api_token}/config", HTTP_ORIGIN="https://foo.example.com") + assert response.status_code == status.HTTP_200_OK, response.json() + assert response.json()["sessionRecording"] + + with self.assertNumQueries(0): + response = self.client.get(f"/array/{self.team.api_token}/config", HTTP_ORIGIN="https://foo.example.com") + assert response.status_code == status.HTTP_200_OK, response.json() + assert response.json()["sessionRecording"] + + with self.assertNumQueries(0): + response = self.client.get(f"/array/{self.team.api_token}/config", HTTP_ORIGIN="https://bar.other.com") + assert response.status_code == status.HTTP_200_OK, response.json() + assert not response.json()["sessionRecording"] + def test_valid_config_js(self): - with self.assertNumQueries(3): - response = self.client.get(f"/array/{self.team.api_token}/config.js") + with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): + response = self.client.get(f"/array/{self.team.api_token}/config.js", HTTP_ORIGIN="https://foo.example.com") with self.assertNumQueries(0): - response = self.client.get(f"/array/{self.team.api_token}/config.js") + response = self.client.get(f"/array/{self.team.api_token}/config.js", HTTP_ORIGIN="https://foo.example.com") assert response.status_code == status.HTTP_200_OK assert response.headers["Content-Type"] == "application/javascript" + assert response.content == snapshot( - b'(function() {\n window._POSTHOG_CONFIG = {"token": "token123", "surveys": [], "heatmaps": false, "siteApps": [], "analytics": {"endpoint": "/i/v0/e/"}, "hasFeatureFlags": false, "sessionRecording": false, "captureDeadClicks": false, "capturePerformance": {"web_vitals": false, "network_timing": true, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "supportedCompression": ["gzip", "gzip-js"], "autocaptureExceptions": false, "defaultIdentifiedOnly": false, "elementsChainAsString": true};\n window._POSTHOG_JS_APPS = [];\n})();' + b'(function() {\n window._POSTHOG_CONFIG = {"token": "token123", "supportedCompression": ["gzip", "gzip-js"], "hasFeatureFlags": false, "captureDeadClicks": 
false, "capturePerformance": {"network_timing": true, "web_vitals": false, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "autocaptureExceptions": false, "analytics": {"endpoint": "/i/v0/e/"}, "elementsChainAsString": true, "sessionRecording": {"endpoint": "/s/", "consoleLogRecordingEnabled": true, "recorderVersion": "v2", "sampleRate": null, "minimumDurationMilliseconds": null, "linkedFlag": null, "networkPayloadCapture": null, "urlTriggers": [], "urlBlocklist": [], "eventTriggers": [], "scriptConfig": null}, "heatmaps": false, "surveys": [], "defaultIdentifiedOnly": true};\n window._POSTHOG_JS_APPS = [];\n})();' ) @patch("posthog.models.remote_config.get_array_js_content", return_value="[MOCKED_ARRAY_JS_CONTENT]") def test_valid_array_js(self, mock_get_array_js_content): - with self.assertNumQueries(3): - response = self.client.get(f"/array/{self.team.api_token}/array.js") + with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): + response = self.client.get(f"/array/{self.team.api_token}/array.js", HTTP_ORIGIN="https://foo.example.com") with self.assertNumQueries(0): - response = self.client.get(f"/array/{self.team.api_token}/array.js") + response = self.client.get(f"/array/{self.team.api_token}/array.js", HTTP_ORIGIN="https://foo.example.com") assert response.status_code == status.HTTP_200_OK assert response.headers["Content-Type"] == "application/javascript" assert response.content assert response.content == snapshot( - b'\n [MOCKED_ARRAY_JS_CONTENT]\n\n (function() {\n window._POSTHOG_CONFIG = {"token": "token123", "surveys": [], "heatmaps": false, "siteApps": [], "analytics": {"endpoint": "/i/v0/e/"}, "hasFeatureFlags": false, "sessionRecording": false, "captureDeadClicks": false, "capturePerformance": {"web_vitals": false, "network_timing": true, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "supportedCompression": ["gzip", "gzip-js"], "autocaptureExceptions": false, "defaultIdentifiedOnly": false, "elementsChainAsString": true};\n window._POSTHOG_JS_APPS = [];\n})();\n ' + b'[MOCKED_ARRAY_JS_CONTENT]\n\n(function() {\n window._POSTHOG_CONFIG = {"token": "token123", "supportedCompression": ["gzip", "gzip-js"], "hasFeatureFlags": false, "captureDeadClicks": false, "capturePerformance": {"network_timing": true, "web_vitals": false, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "autocaptureExceptions": false, "analytics": {"endpoint": "/i/v0/e/"}, "elementsChainAsString": true, "sessionRecording": {"endpoint": "/s/", "consoleLogRecordingEnabled": true, "recorderVersion": "v2", "sampleRate": null, "minimumDurationMilliseconds": null, "linkedFlag": null, "networkPayloadCapture": null, "urlTriggers": [], "urlBlocklist": [], "eventTriggers": [], "scriptConfig": null}, "heatmaps": false, "surveys": [], "defaultIdentifiedOnly": true};\n window._POSTHOG_JS_APPS = [];\n})();' ) # NOT actually testing the content here as it will change dynamically @patch("posthog.models.remote_config.get_array_js_content", return_value="[MOCKED_ARRAY_JS_CONTENT]") def test_valid_array_uses_config_js_cache(self, mock_get_array_js_content): - with self.assertNumQueries(3): - response = self.client.get(f"/array/{self.team.api_token}/config.js") + with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): + response = self.client.get(f"/array/{self.team.api_token}/config.js", HTTP_ORIGIN="https://foo.example.com") with self.assertNumQueries(0): - response = self.client.get(f"/array/{self.team.api_token}/array.js") + response = 
self.client.get(f"/array/{self.team.api_token}/array.js", HTTP_ORIGIN="https://foo.example.com") assert response.status_code == status.HTTP_200_OK diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py index fd84e884cbec8b..35b0bb1cdc5533 100644 --- a/posthog/api/test/test_survey.py +++ b/posthog/api/test/test_survey.py @@ -2378,6 +2378,7 @@ def test_can_clear_associated_actions(self): assert len(survey.actions.all()) == 0 +@freeze_time("2024-12-12 00:00:00") class TestSurveyResponseSampling(APIBaseTest): def _create_survey_with_sampling_limits( self, @@ -2407,6 +2408,7 @@ def _create_survey_with_sampling_limits( ) response_data = response.json() + assert response.status_code == status.HTTP_201_CREATED, response_data survey = Survey.objects.get(id=response_data["id"]) return survey @@ -2907,8 +2909,7 @@ def test_list_surveys_excludes_description(self): for survey in surveys: assert "description" not in survey, f"Description field should not be present in survey: {survey}" - assert surveys[0]["name"] == "Survey 1" - assert surveys[1]["name"] == "Survey 2" + assert len(surveys) == 2 class TestSurveyAPITokens(PersonalAPIKeysBaseTest, APIBaseTest): diff --git a/posthog/api/test/test_user.py b/posthog/api/test/test_user.py index 4e441679cc7097..0d60d7c9c49927 100644 --- a/posthog/api/test/test_user.py +++ b/posthog/api/test/test_user.py @@ -419,7 +419,7 @@ def test_notifications_sent_when_user_email_is_changed_and_email_available( token = email_verification_token_generator.make_token(self.user) with freeze_time("2020-01-01T21:37:00+00:00"): response = self.client.post( - f"/api/users/@me/verify_email/", + f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": token}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -1054,7 +1054,13 @@ def setUp(self): # prevent throttling of user requests to pass on from one test # to the next cache.clear() - return super().setUp() + super().setUp() + + set_instance_setting("EMAIL_HOST", "localhost") + + self.other_user = self._create_user("otheruser@posthog.com", password="12345678") + assert not self.other_user.is_email_verified + assert not self.other_user.is_email_verified # Email verification request @@ -1062,7 +1068,7 @@ def setUp(self): def test_user_can_request_verification_email(self, mock_capture): set_instance_setting("EMAIL_HOST", "localhost") with self.settings(CELERY_TASK_ALWAYS_EAGER=True, SITE_URL="https://my.posthog.net"): - response = self.client.post(f"/api/users/@me/request_email_verification/", {"uuid": self.user.uuid}) + response = self.client.post(f"/api/users/request_email_verification/", {"uuid": self.user.uuid}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.content.decode(), '{"success":true}') self.assertSetEqual({",".join(outmail.to) for outmail in mail.outbox}, {self.CONFIG_EMAIL}) @@ -1080,7 +1086,7 @@ def test_user_can_request_verification_email(self, mock_capture): reset_link = html_message[link_index : html_message.find('"', link_index)] token = reset_link.replace("https://my.posthog.net/verify_email/", "").replace(f"{self.user.uuid}/", "") - response = self.client.post(f"/api/users/@me/verify_email/", {"uuid": self.user.uuid, "token": token}) + response = self.client.post(f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": token}) self.assertEqual(response.status_code, status.HTTP_200_OK) # check is_email_verified is changed to True @@ -1114,8 +1120,9 @@ def test_user_can_request_verification_email(self, mock_capture): 
self.assertEqual(mock_capture.call_count, 3) def test_cant_verify_if_email_is_not_configured(self): + set_instance_setting("EMAIL_HOST", "") with self.settings(CELERY_TASK_ALWAYS_EAGER=True): - response = self.client.post(f"/api/users/@me/request_email_verification/", {"uuid": self.user.uuid}) + response = self.client.post(f"/api/users/request_email_verification/", {"uuid": self.user.uuid}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), @@ -1133,7 +1140,7 @@ def test_cant_verify_more_than_six_times(self): for i in range(7): with self.settings(CELERY_TASK_ALWAYS_EAGER=True, SITE_URL="https://my.posthog.net"): response = self.client.post( - f"/api/users/@me/request_email_verification/", + f"/api/users/request_email_verification/", {"uuid": self.user.uuid}, ) if i < 6: @@ -1153,11 +1160,11 @@ def test_can_validate_email_verification_token(self): token = email_verification_token_generator.make_token(self.user) - response = self.client.post(f"/api/users/@me/verify_email/", {"uuid": self.user.uuid, "token": token}) + response = self.client.post(f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": token}) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_cant_validate_email_verification_token_without_a_token(self): - response = self.client.post(f"/api/users/@me/verify_email/", {"uuid": self.user.uuid}) + response = self.client.post(f"/api/users/verify_email/", {"uuid": self.user.uuid}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), @@ -1183,7 +1190,7 @@ def test_invalid_verification_token_returns_error(self): expired_token, ]: response = self.client.post( - f"/api/users/@me/verify_email/", + f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": token}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -1197,6 +1204,92 @@ def test_invalid_verification_token_returns_error(self): }, ) + def test_can_only_validate_email_token_one_time(self): + token = email_verification_token_generator.make_token(self.user) + response = self.client.post(f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": token}) + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response = self.client.post(f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": token}) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.json(), + { + "type": "validation_error", + "code": "invalid_token", + "detail": "This verification token is invalid or has expired.", + "attr": "token", + }, + ) + + def test_email_verification_logs_in_user(self): + token = email_verification_token_generator.make_token(self.user) + + self.client.logout() + assert self.client.get("/api/users/@me/").status_code == 401 + session_user_id = self.client.session.get("_auth_user_id") + assert session_user_id is None + + # NOTE: Posting sets the session user id but doesn't log in the test client, hence we just check the session id + self.client.post(f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": token}) + session_user_id = self.client.session.get("_auth_user_id") + assert session_user_id == str(self.user.id) + + def test_email_verification_logs_in_correct_user(self): + other_token = email_verification_token_generator.make_token(self.other_user) + self.client.logout() + assert self.client.session.get("_auth_user_id") is None + + # NOTE: The user id in path should 
basically be ignored + self.client.post(f"/api/users/verify_email/", {"uuid": self.other_user.uuid, "token": other_token}) + session_user_id = self.client.session.get("_auth_user_id") + assert session_user_id == str(self.other_user.id) + + def test_email_verification_does_not_apply_to_current_logged_in_user(self): + other_token = email_verification_token_generator.make_token(self.other_user) + + res = self.client.post(f"/api/users/verify_email/", {"uuid": self.other_user.uuid, "token": other_token}) + assert res.status_code == status.HTTP_200_OK + self.user.refresh_from_db() + self.other_user.refresh_from_db() + # Should now be logged in as other user + assert self.client.session.get("_auth_user_id") == str(self.other_user.id) + assert not self.user.is_email_verified + assert self.other_user.is_email_verified + + def test_email_verification_fails_if_using_other_accounts_token(self): + token = email_verification_token_generator.make_token(self.user) + other_token = email_verification_token_generator.make_token(self.other_user) + self.client.logout() + + assert ( + self.client.post(f"/api/users/verify_email/", {"uuid": self.other_user.uuid, "token": token}).status_code + == status.HTTP_400_BAD_REQUEST + ) + + assert ( + self.client.post(f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": other_token}).status_code + == status.HTTP_400_BAD_REQUEST + ) + + def test_does_not_apply_pending_email_for_old_tokens(self): + self.client.logout() + + token = email_verification_token_generator.make_token(self.user) + self.user.pending_email = "new@posthog.com" + self.user.save() + + response = self.client.post(f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": token}) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert self.user.email != "new@posthog.com" + assert self.user.pending_email == "new@posthog.com" + + token = email_verification_token_generator.make_token(self.user) + response = self.client.post(f"/api/users/verify_email/", {"uuid": self.user.uuid, "token": token}) + assert response.status_code == status.HTTP_200_OK + self.user.refresh_from_db() + assert self.user.email == "new@posthog.com" + assert self.user.pending_email is None + class TestUserTwoFactor(APIBaseTest): def setUp(self): diff --git a/posthog/api/user.py b/posthog/api/user.py index 7fc843b4a5be8f..0610abf047df7d 100644 --- a/posthog/api/user.py +++ b/posthog/api/user.py @@ -382,10 +382,11 @@ def get_serializer_context(self): "user_permissions": UserPermissions(cast(User, self.request.user)), } - @action(methods=["POST"], detail=True, permission_classes=[AllowAny]) + @action(methods=["POST"], detail=False, permission_classes=[AllowAny]) def verify_email(self, request, **kwargs): token = request.data["token"] if "token" in request.data else None user_uuid = request.data["uuid"] + if not token: raise serializers.ValidationError({"token": ["This field is required."]}, code="required") @@ -421,7 +422,7 @@ def verify_email(self, request, **kwargs): @action( methods=["POST"], - detail=True, + detail=False, permission_classes=[AllowAny], throttle_classes=[UserEmailVerificationThrottle], ) diff --git a/posthog/api/utils.py b/posthog/api/utils.py index 514534990a8f0a..79c4c1028d1d24 100644 --- a/posthog/api/utils.py +++ b/posthog/api/utils.py @@ -1,4 +1,5 @@ import json +from django.http import HttpRequest from rest_framework.decorators import action as drf_action from functools import wraps from posthog.api.documentation import extend_schema @@ -427,6 +428,25 @@ def parse_domain(url: Any) -> 
Optional[str]: return urlparse(url).hostname + + +def on_permitted_recording_domain(permitted_domains: list[str], request: HttpRequest) -> bool: + origin = parse_domain(request.headers.get("Origin")) + referer = parse_domain(request.headers.get("Referer")) + + user_agent = request.META.get("HTTP_USER_AGENT") + + is_authorized_web_client: bool = hostname_in_allowed_url_list( + permitted_domains, origin + ) or hostname_in_allowed_url_list(permitted_domains, referer) + # TODO this is a short-term fix for beta testers + # TODO we will match on the app identifier in the origin instead and allow users to auth those + is_authorized_mobile_client: bool = user_agent is not None and any( + keyword in user_agent + for keyword in ["posthog-android", "posthog-ios", "posthog-react-native", "posthog-flutter"] + ) + + return is_authorized_web_client or is_authorized_mobile_client + + # By default, DRF spectacular uses the serializer of the view as the response format for actions. However, most actions don't return a version of the model, but something custom. This function removes the response from all actions in the documentation. def action(methods=None, detail=None, url_path=None, url_name=None, responses=None, **kwargs): """ diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index 59b217b4fc8f26..d17bb3b1b69c30 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -794,3 +794,19 @@ async def aupdate_batch_export_backfill_status(backfill_id: UUID, status: str) - raise ValueError(f"BatchExportBackfill with id {backfill_id} not found.") return await model.aget() + + +async def aupdate_records_total_count( + batch_export_id: UUID, interval_start: dt.datetime, interval_end: dt.datetime, count: int +) -> int: + """Update the expected records count for a set of batch export runs. + + Typically, there is one batch export run per batch export interval; however, + there could be multiple if data has been backfilled. + """ + rows_updated = await BatchExportRun.objects.filter( + batch_export_id=batch_export_id, + data_interval_start=interval_start, + data_interval_end=interval_end, + ).aupdate(records_total_count=count) + return rows_updated diff --git a/posthog/batch_exports/sql.py b/posthog/batch_exports/sql.py index baa0216afdbbcb..9a7fd0cea95aa4 100644 --- a/posthog/batch_exports/sql.py +++ b/posthog/batch_exports/sql.py @@ -318,3 +318,22 @@ SETTINGS optimize_aggregation_in_order=1 ) """ + +# TODO: is this the best query to use? 
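# Assumed usage (a sketch; the "rows" variable is hypothetical): pair this query with aupdate_records_total_count above, e.g. for interval_start, interval_end, total_count in rows: await aupdate_records_total_count(batch_export_id, interval_start, interval_end, total_count)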
+EVENT_COUNT_BY_INTERVAL = """ +SELECT + toStartOfInterval(_inserted_at, INTERVAL {interval}) AS interval_start, + interval_start + INTERVAL {interval} AS interval_end, + COUNT(*) as total_count +FROM + events_batch_export_recent( + team_id={team_id}, + interval_start={overall_interval_start}, + interval_end={overall_interval_end}, + include_events={include_events}::Array(String), + exclude_events={exclude_events}::Array(String) + ) AS events +GROUP BY interval_start +ORDER BY interval_start desc +SETTINGS max_replica_delay_for_distributed_queries=1 +""" diff --git a/posthog/cdp/filters.py b/posthog/cdp/filters.py index 3d34a4c4c29fc5..b3ea85d49f45bd 100644 --- a/posthog/cdp/filters.py +++ b/posthog/cdp/filters.py @@ -59,10 +59,9 @@ def hog_function_filters_to_expr(filters: dict, team: Team, actions: dict[int, A all_filters_exprs.append(ast.And(exprs=exprs)) if all_filters_exprs: - final_expr = ast.Or(exprs=all_filters_exprs) - return final_expr - else: - return ast.Constant(value=True) + return ast.Or(exprs=all_filters_exprs) + + return ast.Constant(value=True) def filter_action_ids(filters: Optional[dict]) -> list[int]: diff --git a/posthog/cdp/site_functions.py b/posthog/cdp/site_functions.py index 3896f4f73515e4..690dc136ea5776 100644 --- a/posthog/cdp/site_functions.py +++ b/posthog/cdp/site_functions.py @@ -8,8 +8,7 @@ def get_transpiled_function(hog_function: HogFunction) -> str: - # Wrap in IIFE = Immediately Invoked Function Expression = to avoid polluting global scope - response = "(function() {\n" + response = "" # Build the inputs in three parts: # 1) a simple object with constants/scalars @@ -32,12 +31,6 @@ def get_transpiled_function(hog_function: HogFunction) -> str: else: inputs_object.append(f"{key_string}: {json.dumps(value)}") - # Convert the filters to code - filters_expr = hog_function_filters_to_expr(hog_function.filters or {}, hog_function.team, {}) - filters_code = compiler.visit(filters_expr) - # Start with the STL functions - response += compiler.get_stl_code() + "\n" - # A function to calculate the inputs from globals. If "initial" is true, no errors are logged. response += "function buildInputs(globals, initial) {\n" @@ -59,6 +52,40 @@ def get_transpiled_function(hog_function: HogFunction) -> str: response += f"const source = {transpile(hog_function.hog, 'site')}();" + # Convert the global filters to code + filters_expr = hog_function_filters_to_expr(hog_function.filters or {}, hog_function.team, {}) + filters_code = compiler.visit(filters_expr) + + # Convert the mappings to code + mapping_code = "" + for mapping in hog_function.mappings or []: + mapping_inputs = mapping.get("inputs", {}) + mapping_inputs_schema = mapping.get("inputs_schema", []) + mapping_filters_expr = hog_function_filters_to_expr(mapping.get("filters", {}) or {}, hog_function.team, {}) + mapping_filters_code = compiler.visit(mapping_filters_expr) + + mapping_code += f"if ({mapping_filters_code}) {{" + mapping_code += "(function (){" # IIFE so that the code below has different globals than the filters above + mapping_code += "const newInputs = structuredClone(inputs); const __getGlobal = (key) => key === 'inputs' ? 
newInputs : globals[key];\n" + + for schema in mapping_inputs_schema: + if "key" in schema and schema["key"] not in mapping_inputs: + mapping_inputs[schema["key"]] = {"value": schema.get("default", None)} + + for key, input in mapping_inputs.items(): + value = input.get("value") if input is not None else schema.get("default", None) + key_string = json.dumps(str(key) or "") + if (isinstance(value, str) and "{" in value) or isinstance(value, dict) or isinstance(value, list): + base_code = transpile_template_code(value, compiler) + mapping_code += ( + f"try {{ newInputs[{json.dumps(key)}] = {base_code}; }} catch (e) {{ console.error(e) }}\n" + ) + else: + mapping_code += f"newInputs[{json.dumps(key)}] = {json.dumps(value)};\n" + mapping_code += "source.onEvent({ inputs: newInputs, posthog });" + mapping_code += "})();" + mapping_code += "}\n" + # We are exposing an init function which is what the client will use to actually run this setup code. # The return includes any extra methods that the client might need to use - so far just processEvent response += ( @@ -73,7 +100,10 @@ def get_transpiled_function(hog_function: HogFunction) -> str: const filterMatches = """ + filters_code + """; - if (filterMatches) { source.onEvent({ ...globals, inputs, posthog }); } + if (!filterMatches) { return; } + """ + + (mapping_code or ";") + + """ } } @@ -81,7 +111,12 @@ def get_transpiled_function(hog_function: HogFunction) -> str: const posthog = config.posthog; const callback = config.callback; if ('onLoad' in source) { - const r = source.onLoad({ inputs: buildInputs({}, true), posthog: posthog }); + const globals = { + person: { + properties: posthog.get_property('$stored_person_properties'), + } + } + const r = source.onLoad({ inputs: buildInputs(globals, true), posthog: posthog }); if (r && typeof r.then === 'function' && typeof r.finally === 'function') { r.catch(() => callback(false)).then(() => callback(true)) } else { callback(true) } } else { callback(true); @@ -95,6 +130,8 @@ def get_transpiled_function(hog_function: HogFunction) -> str: return { init: init };""" ) - response += "\n})" + # Wrap in IIFE = Immediately Invoked Function Expression = to avoid polluting global scope + # Add collected STL functions above the generated code + response = "(function() {\n" + compiler.get_stl_code() + "\n" + response + "\n})" return response diff --git a/posthog/cdp/templates/__init__.py b/posthog/cdp/templates/__init__.py index 3434fab954d160..57855bb7ca96ff 100644 --- a/posthog/cdp/templates/__init__.py +++ b/posthog/cdp/templates/__init__.py @@ -44,8 +44,10 @@ from ._siteapps.template_hogdesk import template as hogdesk from ._siteapps.template_notification_bar import template as notification_bar from ._siteapps.template_pineapple_mode import template as pineapple_mode +from ._siteapps.template_debug_posthog import template as debug_posthog from ._internal.template_broadcast import template_new_broadcast as _broadcast from ._internal.template_blank import blank_site_destination, blank_site_app +from ._transformations.template_pass_through import template as pass_through_transformation HOG_FUNCTION_TEMPLATES = [ _broadcast, @@ -96,6 +98,8 @@ hogdesk, notification_bar, pineapple_mode, + pass_through_transformation, + debug_posthog, ] diff --git a/posthog/cdp/templates/_internal/template_blank.py b/posthog/cdp/templates/_internal/template_blank.py index 4f141ad9fc3e8a..6b55ca796903fc 100644 --- a/posthog/cdp/templates/_internal/template_blank.py +++ 
b/posthog/cdp/templates/_internal/template_blank.py @@ -1,11 +1,11 @@ -from posthog.cdp.templates.hog_function_template import HogFunctionTemplate +from posthog.cdp.templates.hog_function_template import HogFunctionMappingTemplate, HogFunctionTemplate blank_site_destination: HogFunctionTemplate = HogFunctionTemplate( status="client-side", type="site_destination", id="template-blank-site-destination", name="New client-side destination", - description="Run code on your website when an event is sent to PostHog. Works only with posthog-js when opt_in_site_apps is set to true.", + description="New destination with complex event mapping. Works only with posthog-js when opt_in_site_apps is set to true.", icon_url="/static/hedgehog/builder-hog-01.png", category=["Custom", "Analytics"], hog=""" @@ -15,9 +15,9 @@ await new Promise((resolve) => window.setTimeout(resolve, 1000)) console.log("🦔 Script loaded") } -export function onEvent({ posthog, ...globals }) { - const { event, person } = globals - console.log(`🦔 Sending event: ${event.event}`, globals) +export function onEvent({ inputs, posthog }) { + console.log(`🦔 Sending event of type ${inputs.eventType}`, inputs.payload) + // fetch('url', { method: 'POST', body: JSON.stringify(inputs.payload) }) } """.strip(), inputs_schema=[ @@ -50,6 +50,88 @@ "required": True, }, ], + mappings=[], + mapping_templates=[ + HogFunctionMappingTemplate( + name="Acquisition", + include_by_default=True, + filters={"events": [{"id": "$pageview", "type": "events"}]}, + inputs_schema=[ + { + "key": "eventType", + "type": "string", + "label": "Event Type", + "description": "The destination's event type", + "default": "acquisition", + "required": True, + }, + { + "key": "payload", + "type": "json", + "label": "Payload", + "description": "Payload sent to the destination.", + "default": { + "event": "{event}", + "person": "{person}", + }, + "secret": False, + "required": True, + }, + ], + ), + HogFunctionMappingTemplate( + name="Conversion", + filters={"events": [{"id": "$autocapture", "type": "events"}]}, + inputs_schema=[ + { + "key": "eventType", + "type": "string", + "label": "Event Type", + "description": "The destination's event type", + "default": "conversion", + "required": True, + }, + { + "key": "payload", + "type": "json", + "label": "Payload", + "description": "Payload sent to the destination.", + "default": { + "event": "{event}", + "person": "{person}", + }, + "secret": False, + "required": True, + }, + ], + ), + HogFunctionMappingTemplate( + name="Retention", + filters={"events": [{"id": "$pageleave", "type": "events"}]}, + inputs_schema=[ + { + "key": "eventType", + "type": "string", + "label": "Event Type", + "description": "The destination's event type", + "default": "retention", + "required": True, + }, + { + "key": "payload", + "type": "json", + "label": "Payload", + "description": "Payload sent to the destination.", + "default": { + "event": "{event}", + "person": "{person}", + }, + "secret": False, + "required": True, + }, + ], + ), + ], ) blank_site_app: HogFunctionTemplate = HogFunctionTemplate( diff --git a/posthog/cdp/templates/_siteapps/template_debug_posthog.py b/posthog/cdp/templates/_siteapps/template_debug_posthog.py new file mode 100644 index 00000000000000..cf964a944bbb6b --- /dev/null +++ b/posthog/cdp/templates/_siteapps/template_debug_posthog.py @@ -0,0 +1,45 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template: HogFunctionTemplate = HogFunctionTemplate( + status="client-side", + type="site_app", + 
id="template-debug-posthog-js", + name="PostHog JS debugger", + description="Enable extra debugging tools on your posthog-js", + icon_url="/static/hedgehog/builder-hog-01.png", + category=["Custom"], + hog=""" +export function onLoad({ inputs, posthog }) { + console.log("Enabling PostHog.js debugging", posthog) + + if (inputs.enable_debugging) { + posthog.debug(true) + } + + if (inputs.capture_config) { + posthog.capture("posthog-js debug", { + config: posthog.config + }) + } +} +""".strip(), + inputs_schema=[ + { + "key": "capture_config", + "type": "boolean", + "label": "Capture debug event on load", + "secret": False, + "default": False, + "required": False, + "description": "Whether to capture an event on load including the posthog config", + }, + { + "key": "enable_debugging", + "type": "boolean", + "label": "Enable debugging", + "secret": False, + "default": False, + "required": False, + }, + ], +) diff --git a/posthog/cdp/templates/_transformations/template_pass_through.py b/posthog/cdp/templates/_transformations/template_pass_through.py new file mode 100644 index 00000000000000..5a4e88e003d31f --- /dev/null +++ b/posthog/cdp/templates/_transformations/template_pass_through.py @@ -0,0 +1,18 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template: HogFunctionTemplate = HogFunctionTemplate( + status="alpha", + type="transformation", + id="template-blank-transformation", + name="Custom transformation", + description="This is a starter template for custom transformations", + icon_url="/static/hedgehog/builder-hog-01.png", + category=["Custom"], + hog=""" +// This is a blank template for custom transformations +// The function receives `event` as a global object and expects it to be returned +// If you return null then the event will be discarded +return event +""".strip(), + inputs_schema=[], +) diff --git a/posthog/cdp/templates/google_ads/template_google_ads.py b/posthog/cdp/templates/google_ads/template_google_ads.py index 3743ca93db5417..9cc61d507fe563 100644 --- a/posthog/cdp/templates/google_ads/template_google_ads.py +++ b/posthog/cdp/templates/google_ads/template_google_ads.py @@ -55,6 +55,7 @@ "type": "integration", "integration": "google-ads", "label": "Google Ads account", + "requiredScopes": "https://www.googleapis.com/auth/adwords https://www.googleapis.com/auth/userinfo.email", "secret": False, "required": True, }, diff --git a/posthog/cdp/templates/helpers.py b/posthog/cdp/templates/helpers.py index e26f55f842a731..6f1eba04ae75d0 100644 --- a/posthog/cdp/templates/helpers.py +++ b/posthog/cdp/templates/helpers.py @@ -16,7 +16,7 @@ class BaseHogFunctionTemplateTest(BaseTest): def setUp(self): super().setUp() - self.compiled_hog = compile_hog(self.template.hog, supported_functions={"fetch", "postHogCapture"}) + self.compiled_hog = compile_hog(self.template.hog, self.template.type) self.mock_print = MagicMock(side_effect=lambda *args: print("[DEBUG HogFunctionPrint]", *args)) # noqa: T201 # Side effect - log the fetch call and return with sensible output diff --git a/posthog/cdp/templates/hog_function_template.py b/posthog/cdp/templates/hog_function_template.py index c3227f9b8eb73a..0ebfc1f1c37dcb 100644 --- a/posthog/cdp/templates/hog_function_template.py +++ b/posthog/cdp/templates/hog_function_template.py @@ -23,6 +23,22 @@ class HogFunctionSubTemplate: inputs: Optional[dict] = None +@dataclasses.dataclass(frozen=True) +class HogFunctionMapping: + filters: Optional[dict] = None + inputs: Optional[dict] = None + inputs_schema: 
Optional[list[dict]] = None + + +@dataclasses.dataclass(frozen=True) +class HogFunctionMappingTemplate: + name: str + include_by_default: Optional[bool] = None + filters: Optional[dict] = None + inputs: Optional[dict] = None + inputs_schema: Optional[list[dict]] = None + + @dataclasses.dataclass(frozen=True) class HogFunctionTemplate: status: Literal["alpha", "beta", "stable", "free", "client-side"] @@ -30,6 +46,7 @@ class HogFunctionTemplate: "destination", "site_destination", "site_app", + "transformation", "shared", "email", "sms", @@ -46,6 +63,8 @@ class HogFunctionTemplate: category: list[str] sub_templates: Optional[list[HogFunctionSubTemplate]] = None filters: Optional[dict] = None + mappings: Optional[list[HogFunctionMapping]] = None + mapping_templates: Optional[list[HogFunctionMappingTemplate]] = None masking: Optional[dict] = None icon_url: Optional[str] = None diff --git a/posthog/cdp/templates/hubspot/template_hubspot.py b/posthog/cdp/templates/hubspot/template_hubspot.py index f8f6c9cf06a723..a36c850725972f 100644 --- a/posthog/cdp/templates/hubspot/template_hubspot.py +++ b/posthog/cdp/templates/hubspot/template_hubspot.py @@ -61,6 +61,7 @@ "type": "integration", "integration": "hubspot", "label": "Hubspot connection", + "requiredScopes": "crm.objects.contacts.write crm.objects.contacts.read", "secret": False, "required": True, }, @@ -307,6 +308,7 @@ "type": "integration", "integration": "hubspot", "label": "Hubspot connection", + "requiredScopes": "analytics.behavioral_events.send behavioral_events.event_definitions.read_write", "secret": False, "required": True, }, diff --git a/posthog/cdp/templates/salesforce/template_salesforce.py b/posthog/cdp/templates/salesforce/template_salesforce.py index eedfd9980efb1d..844c86ad15803a 100644 --- a/posthog/cdp/templates/salesforce/template_salesforce.py +++ b/posthog/cdp/templates/salesforce/template_salesforce.py @@ -15,6 +15,7 @@ "type": "integration", "integration": "salesforce", "label": "Salesforce account", + "requiredScopes": "refresh_token full", "secret": False, "required": True, } diff --git a/posthog/cdp/templates/slack/template_slack.py b/posthog/cdp/templates/slack/template_slack.py index 16bb0383c1c0bb..8cfb5a84101de7 100644 --- a/posthog/cdp/templates/slack/template_slack.py +++ b/posthog/cdp/templates/slack/template_slack.py @@ -34,6 +34,7 @@ "type": "integration", "integration": "slack", "label": "Slack workspace", + "requiredScopes": "channels:read groups:read chat:write chat:write.customize", "secret": False, "required": True, }, diff --git a/posthog/cdp/templates/test_cdp_templates.py b/posthog/cdp/templates/test_cdp_templates.py index 4c873a9a820ec8..d4a5520a2fd187 100644 --- a/posthog/cdp/templates/test_cdp_templates.py +++ b/posthog/cdp/templates/test_cdp_templates.py @@ -10,8 +10,9 @@ def setUp(self): def test_templates_are_valid(self): for template in HOG_FUNCTION_TEMPLATES: - assert validate_inputs_schema(template.inputs_schema) + if template.inputs_schema: + assert validate_inputs_schema(template.inputs_schema) if template.type not in TYPES_WITH_TRANSPILED_FILTERS: - bytecode = compile_hog(template.hog) + bytecode = compile_hog(template.hog, template.type) assert bytecode[0] == "_H" diff --git a/posthog/cdp/test/test_site_functions.py b/posthog/cdp/test/test_site_functions.py index 44f0e11f373ae8..9370cb7266740b 100644 --- a/posthog/cdp/test/test_site_functions.py +++ b/posthog/cdp/test/test_site_functions.py @@ -77,7 +77,8 @@ def test_get_transpiled_function_basic(self): const filterGlobals = { 
...globals.groups, ...globals.event, person: globals.person, inputs, pdi: { distinct_id: globals.event.distinct_id, person: globals.person } }; let __getGlobal = (key) => filterGlobals[key]; const filterMatches = true; - if (filterMatches) { source.onEvent({ ...globals, inputs, posthog }); } + if (!filterMatches) { return; } + ; } } @@ -85,7 +86,12 @@ def test_get_transpiled_function_basic(self): const posthog = config.posthog; const callback = config.callback; if ('onLoad' in source) { - const r = source.onLoad({ inputs: buildInputs({}, true), posthog: posthog }); + const globals = { + person: { + properties: posthog.get_property('$stored_person_properties'), + } + } + const r = source.onLoad({ inputs: buildInputs(globals, true), posthog: posthog }); if (r && typeof r.then === 'function' && typeof r.finally === 'function') { r.catch(() => callback(false)).then(() => callback(true)) } else { callback(true) } } else { callback(true); @@ -131,7 +137,8 @@ def test_get_transpiled_function_with_filters(self): assert "console.log(event.event);" in result assert "const filterMatches = " in result assert '__getGlobal("event") == "$pageview"' in result - assert "if (filterMatches) { source.onEvent({" in result + assert "const filterMatches = !!(!!((__getGlobal" in result + assert "if (!filterMatches) { return; }" in result def test_get_transpiled_function_with_invalid_template_input(self): self.hog_function.hog = "export function onLoad() { console.log(inputs.greeting); }" @@ -255,3 +262,24 @@ def test_get_transpiled_function_with_complex_filters(self): assert "const filterMatches = " in result assert '__getGlobal("event") == "$pageview"' in result assert "https://example.com" in result + + def test_get_transpiled_function_with_mappings(self): + self.hog_function.hog = "export function onLoad({ inputs, posthog }) { console.log(inputs); }" + self.hog_function.inputs = {"greeting": {"value": "Hello, {person.properties.nonexistent_property}!"}} + self.hog_function.filters = { + "events": [{"id": "$pageview", "name": "$pageview", "type": "events"}], + } + self.hog_function.mappings = [ + { + "inputs": {"greeting": {"value": "Hallo, {person.properties.nonexistent_property}!"}}, + "filters": {"events": [{"id": "$autocapture", "name": "$autocapture", "type": "events"}]}, + } + ] + + result = self.compile_and_run() + + assert "console.log(inputs);" in result + assert 'const filterMatches = !!(!!((__getGlobal("event") == "$pageview")));' in result + assert 'if (!!(!!((__getGlobal("event") == "$autocapture")))) {' in result + assert "const newInputs = structuredClone(inputs);" in result + assert 'newInputs["greeting"] = concat("Hallo, ", __getProperty' in result diff --git a/posthog/cdp/validation.py b/posthog/cdp/validation.py index 0ca2fa353dc262..ac7f19405cfd59 100644 --- a/posthog/cdp/validation.py +++ b/posthog/cdp/validation.py @@ -65,6 +65,7 @@ class InputsSchemaItemSerializer(serializers.Serializer): integration = serializers.CharField(required=False) integration_key = serializers.CharField(required=False) integration_field = serializers.ChoiceField(choices=["slack_channel"], required=False) + requiredScopes = serializers.CharField(required=False) # TODO Validate choices if type=choice @@ -184,13 +185,16 @@ def validate_inputs( return validated_inputs -def compile_hog(hog: str, supported_functions: Optional[set[str]] = None, in_repl: Optional[bool] = False) -> list[Any]: +def compile_hog(hog: str, hog_type: str, in_repl: Optional[bool] = False) -> list[Any]: # Attempt to compile the hog try: program 
= parse_program(hog) - return create_bytecode( - program, supported_functions=supported_functions or {"fetch", "postHogCapture"}, in_repl=in_repl - ).bytecode + supported_functions = set() + + if hog_type == "destination": + supported_functions = {"fetch", "postHogCapture"} + + return create_bytecode(program, supported_functions=supported_functions, in_repl=in_repl).bytecode except Exception as e: logger.error(f"Failed to compile hog {e}", exc_info=True) raise serializers.ValidationError({"hog": "Hog code has errors."}) diff --git a/posthog/helpers/dashboard_templates.py b/posthog/helpers/dashboard_templates.py index 0e3f8a81f95364..313f8c6722a2ae 100644 --- a/posthog/helpers/dashboard_templates.py +++ b/posthog/helpers/dashboard_templates.py @@ -4,22 +4,6 @@ import structlog from posthog.constants import ( - BREAKDOWN, - BREAKDOWN_TYPE, - DATE_FROM, - DISPLAY, - FILTER_TEST_ACCOUNTS, - INSIGHT, - INSIGHT_TRENDS, - INTERVAL, - PROPERTIES, - TREND_FILTER_TYPE_EVENTS, - TRENDS_BAR_VALUE, - TRENDS_BOLD_NUMBER, - TRENDS_LINEAR, - TRENDS_TABLE, - TRENDS_WORLD_MAP, - UNIQUE_USERS, AvailableFeature, ENRICHED_DASHBOARD_INSIGHT_IDENTIFIER, ) @@ -37,7 +21,7 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: - dashboard.filters = {DATE_FROM: "-30d"} + dashboard.filters = {"date_from": "-30d"} if dashboard.team.organization.is_feature_available(AvailableFeature.TAGGING): tag, _ = Tag.objects.get_or_create( name="marketing", @@ -52,19 +36,28 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard, name="Website Unique Users (Total)", description="Shows the number of unique users that use your app every day.", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$pageview", - "math": UNIQUE_USERS, - "type": TREND_FILTER_TYPE_EVENTS, - } - ], - INTERVAL: "day", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: TRENDS_BOLD_NUMBER, - "compare": True, + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown_type": "event"}, + "compareFilter": {"compare": True}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "day", + "kind": "TrendsQuery", + "properties": [], + "series": [{"event": "$pageview", "kind": "EventsNode", "math": "dau", "name": "$pageview"}], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "BoldNumber", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, + }, }, layouts={ "sm": {"i": "21", "x": 0, "y": 0, "w": 6, "h": 5, "minW": 3, "minH": 5}, @@ -85,40 +78,43 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard, name="Organic SEO Unique Users (Total)", description="", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$pageview", - "math": UNIQUE_USERS, - "type": TREND_FILTER_TYPE_EVENTS, - } - ], - INTERVAL: "day", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: TRENDS_BOLD_NUMBER, - "compare": True, - PROPERTIES: { - "type": "AND", - "values": [ - { - "type": "AND", - "values": [ - { - "key": "$referring_domain", - "type": "event", - "value": "google", - "operator": "icontains", - }, - { - "key": "utm_source", - "type": "event", - "value": "is_not_set", - "operator": "is_not_set", - }, - ], - } - ], + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown_type": "event"}, + "compareFilter": {"compare": True}, + "dateRange": {"date_from": "-30d", 
"explicitDate": False}, + "filterTestAccounts": False, + "interval": "day", + "kind": "TrendsQuery", + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + { + "key": "$referring_domain", + "operator": "icontains", + "type": "event", + "value": "google", + }, + {"key": "utm_source", "operator": "is_not_set", "type": "event", "value": "is_not_set"}, + ], + } + ], + }, + "series": [{"event": "$pageview", "kind": "EventsNode", "math": "dau", "name": "$pageview"}], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "BoldNumber", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, }, }, layouts={ @@ -141,18 +137,27 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard, name="Website Unique Users (Breakdown)", description="", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$pageview", - "math": UNIQUE_USERS, - "type": TREND_FILTER_TYPE_EVENTS, - } - ], - INTERVAL: "week", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: "ActionsBar", + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown_type": "event"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "week", + "kind": "TrendsQuery", + "properties": [], + "series": [{"event": "$pageview", "kind": "EventsNode", "math": "dau", "name": "$pageview"}], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsBar", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, + }, }, layouts={ "sm": {"i": "23", "x": 0, "y": 5, "w": 6, "h": 5, "minW": 3, "minH": 5}, @@ -173,32 +178,38 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard, name="Organic SEO Unique Users (Breakdown)", description="", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$pageview", - "math": UNIQUE_USERS, - "type": TREND_FILTER_TYPE_EVENTS, - PROPERTIES: [ - { - "key": "$referring_domain", - "type": "event", - "value": "google", - "operator": "icontains", - }, - { - "key": "utm_source", - "type": "event", - "value": "is_not_set", - "operator": "is_not_set", - }, - ], - } - ], - INTERVAL: "week", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: "ActionsBar", + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown_type": "event"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "week", + "kind": "TrendsQuery", + "properties": [], + "series": [ + { + "event": "$pageview", + "kind": "EventsNode", + "math": "dau", + "name": "$pageview", + "properties": [ + {"key": "$referring_domain", "operator": "icontains", "type": "event", "value": "google"}, + {"key": "utm_source", "operator": "is_not_set", "type": "event", "value": "is_not_set"}, + ], + } + ], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsBar", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, + }, }, layouts={ "sm": {"i": "24", "x": 6, "y": 5, "w": 6, "h": 5, "minW": 3, "minH": 5}, @@ -213,30 +224,31 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard, name="Sessions Per 
User", description="", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$pageview", - "math": UNIQUE_USERS, - "name": "$pageview", - "type": TREND_FILTER_TYPE_EVENTS, - "order": 0, - PROPERTIES: [], - }, - { - "id": "$pageview", - "math": "unique_session", - "name": "$pageview", - "type": TREND_FILTER_TYPE_EVENTS, - "order": 1, - PROPERTIES: [], + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown_type": "event"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "week", + "kind": "TrendsQuery", + "properties": [], + "series": [ + {"event": "$pageview", "kind": "EventsNode", "math": "dau", "name": "$pageview"}, + {"event": "$pageview", "kind": "EventsNode", "math": "unique_session", "name": "$pageview"}, + ], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsLineGraph", + "formula": "B/A", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", }, - ], - INTERVAL: "week", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: "ActionsLineGraph", - "formula": "B/A", + }, }, layouts={ "sm": {"i": "25", "x": 0, "y": 10, "w": 6, "h": 5, "minW": 3, "minH": 5}, @@ -257,30 +269,31 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard, name="Pages Per User", description="", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$pageview", - "math": "total", - "name": "$pageview", - "type": TREND_FILTER_TYPE_EVENTS, - "order": 0, - PROPERTIES: [], - }, - { - "id": "$pageview", - "math": UNIQUE_USERS, - "name": "$pageview", - "type": TREND_FILTER_TYPE_EVENTS, - "order": 1, - PROPERTIES: [], + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown_type": "event"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "week", + "kind": "TrendsQuery", + "properties": [], + "series": [ + {"event": "$pageview", "kind": "EventsNode", "math": "total", "name": "$pageview"}, + {"event": "$pageview", "kind": "EventsNode", "math": "dau", "name": "$pageview"}, + ], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsLineGraph", + "formula": "A/B", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", }, - ], - INTERVAL: "week", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: "ActionsLineGraph", - "formula": "A/B", + }, }, layouts={ "sm": {"i": "26", "x": 6, "y": 10, "w": 6, "h": 5, "minW": 3, "minH": 5}, @@ -303,37 +316,36 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard, name="Top Website Pages (Overall)", description="", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$pageview", - "math": "unique_session", - "name": "$pageview", - "type": TREND_FILTER_TYPE_EVENTS, - "order": 0, - } - ], - INTERVAL: "day", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: TRENDS_BAR_VALUE, - BREAKDOWN: "$current_url", - BREAKDOWN_TYPE: "event", - PROPERTIES: { - "type": "AND", - "values": [ - { - "type": "AND", - "values": [ - { - "key": "$current_url", - "type": "event", - "value": "?", - "operator": "not_icontains", - } - ], - } - ], + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown": "$current_url", "breakdown_type": "event"}, + "dateRange": 
{"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "day", + "kind": "TrendsQuery", + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + {"key": "$current_url", "operator": "not_icontains", "type": "event", "value": "?"} + ], + } + ], + }, + "series": [{"event": "$pageview", "kind": "EventsNode", "math": "unique_session", "name": "$pageview"}], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsBarValue", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, }, }, layouts={ @@ -355,43 +367,42 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard, name="Top Website Pages (via Google)", description="", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$pageview", - "math": "unique_session", - "name": "$pageview", - "type": TREND_FILTER_TYPE_EVENTS, - "order": 0, - } - ], - INTERVAL: "day", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: TRENDS_BAR_VALUE, - BREAKDOWN: "$current_url", - BREAKDOWN_TYPE: "event", - PROPERTIES: { - "type": "AND", - "values": [ - { - "type": "AND", - "values": [ - { - "key": "$current_url", - "type": "event", - "value": "?", - "operator": "not_icontains", - }, - { - "key": "$referring_domain", - "type": "event", - "value": "google", - "operator": "icontains", - }, - ], - } - ], + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown": "$current_url", "breakdown_type": "event"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "day", + "kind": "TrendsQuery", + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + {"key": "$current_url", "operator": "not_icontains", "type": "event", "value": "?"}, + { + "key": "$referring_domain", + "operator": "icontains", + "type": "event", + "value": "google", + }, + ], + } + ], + }, + "series": [{"event": "$pageview", "kind": "EventsNode", "math": "unique_session", "name": "$pageview"}], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsBarValue", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, }, }, layouts={ @@ -407,22 +418,27 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard, name="Website Users by Location", description="", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$pageview", - "math": UNIQUE_USERS, - "name": "$pageview", - "type": TREND_FILTER_TYPE_EVENTS, - "order": 0, - } - ], - INTERVAL: "day", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: TRENDS_WORLD_MAP, - BREAKDOWN: "$geoip_country_code", - BREAKDOWN_TYPE: "person", + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown": "$geoip_country_code", "breakdown_type": "person"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "day", + "kind": "TrendsQuery", + "properties": [], + "series": [{"event": "$pageview", "kind": "EventsNode", "math": "dau", "name": "$pageview"}], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "WorldMap", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 
1, + "yAxisScaleType": "linear", + }, + }, }, layouts={ "sm": {"i": "29", "x": 0, "y": 23, "w": 12, "h": 8, "minW": 3, "minH": 5}, @@ -471,11 +487,9 @@ def create_from_template(dashboard: Dashboard, template: DashboardTemplate) -> N for template_tile in template.tiles: if template_tile["type"] == "INSIGHT": query = template_tile.get("query", None) - filters = template_tile.get("filters") if not query else {} _create_tile_for_insight( dashboard, name=template_tile.get("name"), - filters=filters, query=query, description=template_tile.get("description"), color=template_tile.get("color"), @@ -508,18 +522,15 @@ def _create_tile_for_text(dashboard: Dashboard, body: str, layouts: dict, color: def _create_tile_for_insight( dashboard: Dashboard, name: str, - filters: dict, description: str, layouts: dict, color: Optional[str], query: Optional[dict] = None, ) -> None: - filter_test_accounts = filters.get("filter_test_accounts", True) insight = Insight.objects.create( team=dashboard.team, name=name, description=description, - filters={**filters, "filter_test_accounts": filter_test_accounts}, is_sample=True, query=query, ) @@ -547,7 +558,7 @@ def create_dashboard_from_template(template_key: str, dashboard: Dashboard) -> N def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None: - dashboard.filters = {DATE_FROM: "-30d"} + dashboard.filters = {"date_from": "-30d"} if dashboard.team.organization.is_feature_available(AvailableFeature.TAGGING): tag, _ = Tag.objects.get_or_create( name="feature flags", @@ -562,36 +573,42 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None: dashboard, name="Feature Flag Called Total Volume", description="Shows the number of total calls made on feature flag with key: " + feature_flag.key, - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$feature_flag_called", - "name": "$feature_flag_called", - "type": TREND_FILTER_TYPE_EVENTS, - } - ], - INTERVAL: "day", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: TRENDS_LINEAR, - PROPERTIES: { - "type": "AND", - "values": [ - { - "type": "AND", - "values": [ - { - "key": "$feature_flag", - "type": "event", - "value": feature_flag.key, - }, - ], - } - ], + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown": "$feature_flag_response", "breakdown_type": "event"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "day", + "kind": "TrendsQuery", + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + { + "key": "$feature_flag", + "operator": "exact", + "type": "event", + "value": feature_flag.key, + } + ], + } + ], + }, + "series": [{"event": "$feature_flag_called", "kind": "EventsNode", "name": "$feature_flag_called"}], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsLineGraph", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, }, - BREAKDOWN: "$feature_flag_response", - BREAKDOWN_TYPE: "event", - FILTER_TEST_ACCOUNTS: False, }, layouts={ "sm": {"i": "21", "x": 0, "y": 0, "w": 6, "h": 5, "minW": 3, "minH": 5}, @@ -613,37 +630,49 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None: name="Feature Flag calls made by unique users per variant", description="Shows the number of unique user calls made on feature flag per variant with key: " + feature_flag.key, - filters={ 
- TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$feature_flag_called", - "name": "$feature_flag_called", - "math": UNIQUE_USERS, - "type": TREND_FILTER_TYPE_EVENTS, - } - ], - INTERVAL: "day", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: TRENDS_TABLE, - PROPERTIES: { - "type": "AND", - "values": [ + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown": "$feature_flag_response", "breakdown_type": "event"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "day", + "kind": "TrendsQuery", + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + { + "key": "$feature_flag", + "operator": "exact", + "type": "event", + "value": feature_flag.key, + } + ], + } + ], + }, + "series": [ { - "type": "AND", - "values": [ - { - "key": "$feature_flag", - "type": "event", - "value": feature_flag.key, - }, - ], + "event": "$feature_flag_called", + "kind": "EventsNode", + "math": "dau", + "name": "$feature_flag_called", } ], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsTable", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, }, - BREAKDOWN: "$feature_flag_response", - BREAKDOWN_TYPE: "event", - FILTER_TEST_ACCOUNTS: False, }, layouts={ "sm": {"i": "22", "x": 6, "y": 0, "w": 6, "h": 5, "minW": 3, "minH": 5}, @@ -667,40 +696,45 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das dashboard, name=f"{ENRICHED_DASHBOARD_INSIGHT_IDENTIFIER} Total Volume", description="Shows the total number of times this feature was viewed and interacted with", - filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$feature_view", - "name": "Feature View - Total", - "type": TREND_FILTER_TYPE_EVENTS, - }, - { - "id": "$feature_view", - "name": "Feature View - Unique users", - "type": TREND_FILTER_TYPE_EVENTS, - "math": UNIQUE_USERS, + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown_type": "event"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "day", + "kind": "TrendsQuery", + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + {"key": "feature_flag", "operator": "exact", "type": "event", "value": feature_flag.key} + ], + } + ], }, - ], - INTERVAL: "day", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: TRENDS_LINEAR, - PROPERTIES: { - "type": "AND", - "values": [ + "series": [ + {"event": "$feature_view", "kind": "EventsNode", "name": "Feature View - Total"}, { - "type": "AND", - "values": [ - { - "key": "feature_flag", - "type": "event", - "value": feature_flag.key, - }, - ], - } + "event": "$feature_view", + "kind": "EventsNode", + "math": "dau", + "name": "Feature View - Unique users", + }, ], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsLineGraph", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, }, - FILTER_TEST_ACCOUNTS: False, }, layouts={}, color=None, @@ -710,40 +744,45 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das dashboard, name="Feature Interaction Total Volume", description="Shows the total number of times this feature was viewed and interacted with", - 
filters={ - TREND_FILTER_TYPE_EVENTS: [ - { - "id": "$feature_interaction", - "name": "Feature Interaction - Total", - "type": TREND_FILTER_TYPE_EVENTS, - }, - { - "id": "$feature_interaction", - "name": "Feature Interaction - Unique users", - "type": TREND_FILTER_TYPE_EVENTS, - "math": UNIQUE_USERS, + query={ + "kind": "InsightVizNode", + "source": { + "breakdownFilter": {"breakdown_type": "event"}, + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "filterTestAccounts": False, + "interval": "day", + "kind": "TrendsQuery", + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + {"key": "feature_flag", "operator": "exact", "type": "event", "value": feature_flag.key} + ], + } + ], }, - ], - INTERVAL: "day", - INSIGHT: INSIGHT_TRENDS, - DATE_FROM: "-30d", - DISPLAY: TRENDS_LINEAR, - PROPERTIES: { - "type": "AND", - "values": [ + "series": [ + {"event": "$feature_interaction", "kind": "EventsNode", "name": "Feature Interaction - Total"}, { - "type": "AND", - "values": [ - { - "key": "feature_flag", - "type": "event", - "value": feature_flag.key, - }, - ], - } + "event": "$feature_interaction", + "kind": "EventsNode", + "math": "dau", + "name": "Feature Interaction - Unique users", + }, ], + "trendsFilter": { + "aggregationAxisFormat": "numeric", + "display": "ActionsLineGraph", + "showAlertThresholdLines": False, + "showLegend": False, + "showPercentStackView": False, + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "yAxisScaleType": "linear", + }, }, - FILTER_TEST_ACCOUNTS: False, }, layouts={}, color=None, diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py index 9ca4500aa2abd1..dd8ffc8a377a93 100644 --- a/posthog/hogql/database/database.py +++ b/posthog/hogql/database/database.py @@ -45,6 +45,11 @@ RawPersonDistinctIdOverridesTable, join_with_person_distinct_id_overrides_table, ) +from posthog.hogql.database.schema.error_tracking_issue_fingerprint_overrides import ( + ErrorTrackingIssueFingerprintOverridesTable, + RawErrorTrackingIssueFingerprintOverridesTable, + join_with_error_tracking_issue_fingerprint_overrides_table, +) from posthog.hogql.database.schema.person_distinct_ids import ( PersonDistinctIdsTable, RawPersonDistinctIdsTable, @@ -54,6 +59,7 @@ RawPersonsTable, join_with_persons_table, ) +from posthog.hogql.database.schema.query_log import QueryLogTable, RawQueryLogTable from posthog.hogql.database.schema.session_replay_events import ( RawSessionReplayEventsTable, SessionReplayEventsTable, @@ -104,11 +110,15 @@ class Database(BaseModel): persons: PersonsTable = PersonsTable() person_distinct_ids: PersonDistinctIdsTable = PersonDistinctIdsTable() person_distinct_id_overrides: PersonDistinctIdOverridesTable = PersonDistinctIdOverridesTable() + error_tracking_issue_fingerprint_overrides: ErrorTrackingIssueFingerprintOverridesTable = ( + ErrorTrackingIssueFingerprintOverridesTable() + ) session_replay_events: SessionReplayEventsTable = SessionReplayEventsTable() cohort_people: CohortPeople = CohortPeople() static_cohort_people: StaticCohortPeople = StaticCohortPeople() log_entries: LogEntriesTable = LogEntriesTable() + query_log: QueryLogTable = QueryLogTable() app_metrics: AppMetrics2Table = AppMetrics2Table() console_logs_log_entries: ReplayConsoleLogsLogEntriesTable = ReplayConsoleLogsLogEntriesTable() batch_export_log_entries: BatchExportLogEntriesTable = BatchExportLogEntriesTable() @@ -121,7 +131,11 @@ class Database(BaseModel): raw_groups: RawGroupsTable = RawGroupsTable() raw_cohort_people: 
RawCohortPeople = RawCohortPeople()
     raw_person_distinct_id_overrides: RawPersonDistinctIdOverridesTable = RawPersonDistinctIdOverridesTable()
+    raw_error_tracking_issue_fingerprint_overrides: RawErrorTrackingIssueFingerprintOverridesTable = (
+        RawErrorTrackingIssueFingerprintOverridesTable()
+    )
     raw_sessions: Union[RawSessionsTableV1, RawSessionsTableV2] = RawSessionsTableV1()
+    raw_query_log: RawQueryLogTable = RawQueryLogTable()

     # system tables
     numbers: NumbersTable = NumbersTable()
@@ -139,6 +153,7 @@ class Database(BaseModel):
         "app_metrics",
         "sessions",
         "heatmaps",
+        "query_log",
     ]

     _warehouse_table_names: list[str] = []
@@ -213,6 +228,27 @@ def _use_person_id_from_person_overrides(database: Database) -> None:
     )


+def _use_error_tracking_issue_id_from_error_tracking_issue_overrides(database: Database) -> None:
+    database.events.fields["event_issue_id"] = ExpressionField(
+        name="event_issue_id",
+        # convert to UUID to match the type of `issue_id` on the overrides table
+        expr=parse_expr("toUUID(properties.$exception_issue_id)"),
+    )
+    database.events.fields["exception_issue_override"] = LazyJoin(
+        from_field=["fingerprint"],
+        join_table=ErrorTrackingIssueFingerprintOverridesTable(),
+        join_function=join_with_error_tracking_issue_fingerprint_overrides_table,
+    )
+    database.events.fields["issue_id"] = ExpressionField(
+        name="issue_id",
+        expr=parse_expr(
+            # NOTE: assumes `join_use_nulls = 0` (the default), as `exception_issue_override.issue_id` is not Nullable
+            "if(not(empty(exception_issue_override.issue_id)), exception_issue_override.issue_id, event_issue_id)",
+            start=None,
+        ),
+    )
+
+
 def create_hogql_database(
     team_id: int, modifiers: Optional[HogQLQueryModifiers] = None, team_arg: Optional["Team"] = None
 ) -> Database:
@@ -280,6 +316,8 @@ def create_hogql_database(
     )
     cast(LazyJoin, raw_replay_events.fields["events"]).join_table = events

+    _use_error_tracking_issue_id_from_error_tracking_issue_overrides(database)
+
     database.persons.fields["$virt_initial_referring_domain_type"] = create_initial_domain_type(
         "$virt_initial_referring_domain_type"
     )
@@ -412,9 +450,11 @@ def define_mappings(warehouse: dict[str, Table], get_table: Callable):
                 from_field=from_field,
                 to_field=to_field,
                 join_table=joining_table,
-                join_function=join.join_function_for_experiments()
-                if "events" == join.joining_table_name and join.configuration.get("experiments_optimized")
-                else join.join_function(),
+                join_function=(
+                    join.join_function_for_experiments()
+                    if "events" == join.joining_table_name and join.configuration.get("experiments_optimized")
+                    else join.join_function()
+                ),
             )

         if join.source_table_name == "persons":
diff --git a/posthog/hogql/database/schema/error_tracking_issue_fingerprint_overrides.py b/posthog/hogql/database/schema/error_tracking_issue_fingerprint_overrides.py
new file mode 100644
index 00000000000000..aae04d72cfd42c
--- /dev/null
+++ b/posthog/hogql/database/schema/error_tracking_issue_fingerprint_overrides.py
@@ -0,0 +1,94 @@
+from posthog.hogql.ast import SelectQuery
+from posthog.hogql.constants import HogQLQuerySettings
+from posthog.hogql.context import HogQLContext
+
+from posthog.hogql.database.argmax import argmax_select
+from posthog.hogql.database.models import (
+    Table,
+    IntegerDatabaseField,
+    StringDatabaseField,
+    BooleanDatabaseField,
+    LazyTable,
+    FieldOrTable,
+    LazyTableToAdd,
+    LazyJoinToAdd,
+)
+from posthog.hogql.errors import ResolutionError
+
+ERROR_TRACKING_ISSUE_FINGERPRINT_OVERRIDES_FIELDS: dict[str, FieldOrTable] = {
+    "team_id": IntegerDatabaseField(name="team_id"),
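+    # Field set shared by both table variants below: the raw table additionally carries
+    # `is_deleted` and `version`, which the argmax collapse in
+    # select_from_error_tracking_issue_fingerprint_overrides_table() consumes.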
+ "fingerprint": StringDatabaseField(name="fingerprint"), + "issue_id": StringDatabaseField(name="issue_id"), +} + + +def join_with_error_tracking_issue_fingerprint_overrides_table( + join_to_add: LazyJoinToAdd, + context: HogQLContext, + node: SelectQuery, +): + from posthog.hogql import ast + + if not join_to_add.fields_accessed: + raise ResolutionError("No fields requested from error_tracking_issue_fingerprint_overrides") + join_expr = ast.JoinExpr( + table=select_from_error_tracking_issue_fingerprint_overrides_table(join_to_add.fields_accessed) + ) + join_expr.join_type = "LEFT OUTER JOIN" + join_expr.alias = join_to_add.to_table + join_expr.constraint = ast.JoinConstraint( + expr=ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=ast.Field(chain=[join_to_add.from_table, "properties", "$exception_fingerprint"]), + right=ast.Field(chain=[join_to_add.to_table, "fingerprint"]), + ), + constraint_type="ON", + ) + return join_expr + + +def select_from_error_tracking_issue_fingerprint_overrides_table(requested_fields: dict[str, list[str | int]]): + # Always include "issue_id", as it's the key we use to make further joins, and it'd be great if it's available + if "issue_id" not in requested_fields: + requested_fields = {**requested_fields, "issue_id": ["issue_id"]} + select = argmax_select( + table_name="raw_error_tracking_issue_fingerprint_overrides", + select_fields=requested_fields, + group_fields=["fingerprint"], + argmax_field="version", + deleted_field="is_deleted", + ) + select.settings = HogQLQuerySettings(optimize_aggregation_in_order=True) + return select + + +class RawErrorTrackingIssueFingerprintOverridesTable(Table): + fields: dict[str, FieldOrTable] = { + **ERROR_TRACKING_ISSUE_FINGERPRINT_OVERRIDES_FIELDS, + "is_deleted": BooleanDatabaseField(name="is_deleted"), + "version": IntegerDatabaseField(name="version"), + } + + def to_printed_clickhouse(self, context): + return "error_tracking_issue_fingerprint_overrides" + + def to_printed_hogql(self): + return "raw_error_tracking_issue_fingerprint_overrides" + + +class ErrorTrackingIssueFingerprintOverridesTable(LazyTable): + fields: dict[str, FieldOrTable] = ERROR_TRACKING_ISSUE_FINGERPRINT_OVERRIDES_FIELDS + + def lazy_select( + self, + table_to_add: LazyTableToAdd, + context: HogQLContext, + node: SelectQuery, + ): + return select_from_error_tracking_issue_fingerprint_overrides_table(table_to_add.fields_accessed) + + def to_printed_clickhouse(self, context): + return "error_tracking_issue_fingerprint_overrides" + + def to_printed_hogql(self): + return "error_tracking_issue_fingerprint_overrides" diff --git a/posthog/hogql/database/schema/query_log.py b/posthog/hogql/database/schema/query_log.py new file mode 100644 index 00000000000000..873ebfa50a239d --- /dev/null +++ b/posthog/hogql/database/schema/query_log.py @@ -0,0 +1,131 @@ +from typing import Any + +from posthog.hogql import ast +from posthog.hogql.database.models import ( + IntegerDatabaseField, + StringDatabaseField, + DateTimeDatabaseField, + LazyTable, + FieldOrTable, + LazyTableToAdd, + FloatDatabaseField, + FunctionCallTable, + BooleanDatabaseField, +) + +QUERY_LOG_FIELDS: dict[str, FieldOrTable] = { + "query_id": StringDatabaseField(name="query_id"), + "query": StringDatabaseField(name="query"), # + "query_start_time": DateTimeDatabaseField(name="event_time"), # + "query_duration_ms": FloatDatabaseField(name="query_duration_ms"), # + "created_by": IntegerDatabaseField(name="created_by"), + "read_rows": IntegerDatabaseField(name="read_rows"), + 
"read_bytes": IntegerDatabaseField(name="read_bytes"), + "result_rows": IntegerDatabaseField(name="result_rows"), + "result_bytes": IntegerDatabaseField(name="result_bytes"), + "memory_usage": IntegerDatabaseField(name="memory_usage"), + "status": StringDatabaseField(name="type"), + "kind": StringDatabaseField(name="kind"), + "query_type": StringDatabaseField(name="query_type"), + "is_personal_api_key_request": BooleanDatabaseField(name="is_personal_api_key_request"), +} + +RAW_QUERY_LOG_FIELDS: dict[str, FieldOrTable] = QUERY_LOG_FIELDS | { + # below fields are necessary to compute some of the resulting fields + "type": StringDatabaseField(name="type"), + "is_initial_query": BooleanDatabaseField(name="is_initial_query"), + "log_comment": StringDatabaseField(name="log_comment"), +} + +STRING_FIELDS = { + "query_type": ["query_type"], + "query_id": ["client_query_id"], + "query": ["query", "query"], + "kind": ["query", "kind"], +} +INT_FIELDS = {"created_by": ["user_id"]} + + +class QueryLogTable(LazyTable): + fields: dict[str, FieldOrTable] = QUERY_LOG_FIELDS + + def to_printed_clickhouse(self, context) -> str: + return "query_log" + + def to_printed_hogql(self) -> str: + return "query_log" + + def lazy_select(self, table_to_add: LazyTableToAdd, context, node) -> Any: + requested_fields = table_to_add.fields_accessed + + raw_table_name = "raw_query_log" + + def get_alias(name, chain): + if name in STRING_FIELDS: + keys = STRING_FIELDS[name] + expr = ast.Call( + name="JSONExtractString", + args=[ast.Field(chain=[raw_table_name, "log_comment"])] + [ast.Constant(value=v) for v in keys], + ) + return ast.Alias(alias=name, expr=expr) + if name in INT_FIELDS: + keys = INT_FIELDS[name] + expr = ast.Call( + name="JSONExtractInt", + args=[ast.Field(chain=[raw_table_name, "log_comment"])] + [ast.Constant(value=v) for v in keys], + ) + return ast.Alias(alias=name, expr=expr) + if name == "is_personal_api_key_request": + cmp_expr = ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=ast.Constant(value="personal_api_key"), + right=ast.Call( + name="JSONExtractString", + args=[ast.Field(chain=["log_comment"]), ast.Constant(value="access_method")], + ), + ) + return ast.Alias(alias=name, expr=cmp_expr) + return ast.Alias(alias=name, expr=ast.Field(chain=[raw_table_name, *chain])) + + fields: list[ast.Expr] = [get_alias(name, chain) for name, chain in requested_fields.items()] + + return ast.SelectQuery( + select=fields, + select_from=ast.JoinExpr(table=ast.Field(chain=[raw_table_name])), + where=ast.And( + exprs=[ + ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=ast.Constant(value=context.project_id), + right=ast.Call( + name="JSONExtractInt", + args=[ast.Field(chain=["log_comment"]), ast.Constant(value="user_id")], + ), + ), + ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["type"]), + right=ast.Array( + exprs=[ + ast.Constant(value="QueryFinish"), + ast.Constant(value="ExceptionBeforeStart"), + ast.Constant(value="ExceptionWhileProcessing"), + ] + ), + ), + ast.Field(chain=["is_initial_query"]), + ] + ), + ) + + +class RawQueryLogTable(FunctionCallTable): + fields: dict[str, FieldOrTable] = RAW_QUERY_LOG_FIELDS + + name: str = "raw_query_log" + + def to_printed_clickhouse(self, context) -> str: + return "clusterAllReplicas(posthog, system.query_log)" + + def to_printed_hogql(self) -> str: + return "query_log" diff --git a/posthog/hogql/database/schema/test/test_table_query_log.py b/posthog/hogql/database/schema/test/test_table_query_log.py new file 
mode 100644
index 00000000000000..d892520f31ca08
--- /dev/null
+++ b/posthog/hogql/database/schema/test/test_table_query_log.py
@@ -0,0 +1,51 @@
+from unittest.mock import ANY, MagicMock, patch
+from posthog.clickhouse.client import sync_execute
+from posthog.hogql.context import HogQLContext
+from posthog.hogql.database.database import create_hogql_database
+from posthog.hogql.query import execute_hogql_query
+from posthog.test.base import (
+    APIBaseTest,
+    ClickhouseTestMixin,
+)
+
+
+class TestQueryLogTable(ClickhouseTestMixin, APIBaseTest):
+    """
+    Tests for the `query_log` virtual table, which maps HogQL queries onto ClickHouse's system.query_log.
+    """
+
+    def setUp(self):
+        super().setUp()
+        self.database = create_hogql_database(self.team.pk)
+        self.context = HogQLContext(database=self.database, team_id=self.team.pk, enable_select_queries=True)
+
+    @patch("posthog.hogql.query.sync_execute", wraps=sync_execute)
+    def test_dumb_query(self, mock_sync_execute: MagicMock):
+        response = execute_hogql_query("select query_start_time from query_log limit 10", self.team)
+
+        ch_query = f"""SELECT
+    query_log.query_start_time AS query_start_time
+FROM
+    (SELECT
+        toTimeZone(raw_query_log.event_time, %(hogql_val_0)s) AS query_start_time
+    FROM
+        clusterAllReplicas(posthog, system.query_log) AS raw_query_log
+    WHERE
+        and(ifNull(equals({self.team.pk}, JSONExtractInt(raw_query_log.log_comment, %(hogql_val_1)s)), 0), in(raw_query_log.type, [%(hogql_val_2)s, %(hogql_val_3)s, %(hogql_val_4)s]), raw_query_log.is_initial_query)) AS query_log
+LIMIT 10 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, max_ast_elements=4000000, max_expanded_ast_elements=4000000, max_bytes_before_external_group_by=0"""
+
+        mock_sync_execute.assert_called_once_with(
+            ch_query,
+            {
+                "hogql_val_0": "UTC",
+                "hogql_val_1": "user_id",
+                "hogql_val_2": "QueryFinish",
+                "hogql_val_3": "ExceptionBeforeStart",
+                "hogql_val_4": "ExceptionWhileProcessing",
+            },
+            with_column_types=True,
+            workload=ANY,
+            team_id=self.team.pk,
+            readonly=True,
+        )
+        assert response.results is not None
diff --git a/posthog/hogql/database/schema/util/test/test_person_where_clause_extractor.py b/posthog/hogql/database/schema/util/test/test_person_where_clause_extractor.py
index 91ba9dca6094d5..3b281c17dd8177 100644
--- a/posthog/hogql/database/schema/util/test/test_person_where_clause_extractor.py
+++ b/posthog/hogql/database/schema/util/test/test_person_where_clause_extractor.py
@@ -59,12 +59,13 @@ def get_clause(self, query: str):
         assert isinstance(new_select.select_from, ast.JoinExpr)
         assert isinstance(new_select.select_from.next_join, ast.JoinExpr)
         assert isinstance(new_select.select_from.next_join.next_join, ast.JoinExpr)
-        assert isinstance(new_select.select_from.next_join.next_join.table, ast.SelectQuery)
+        assert isinstance(new_select.select_from.next_join.next_join.next_join, ast.JoinExpr)
+        assert isinstance(new_select.select_from.next_join.next_join.next_join.table, ast.SelectQuery)

-        assert new_select.select_from.next_join.alias == "events__pdi"
-        assert new_select.select_from.next_join.next_join.alias == "events__pdi__person"
+        assert new_select.select_from.next_join.next_join.alias == "events__pdi"
+        assert new_select.select_from.next_join.next_join.next_join.alias == "events__pdi__person"

-        where = new_select.select_from.next_join.next_join.table.where
+        where = 
new_select.select_from.next_join.next_join.next_join.table.where if where is None: return None diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr index 0eaa25fbe1f07f..d5d1d67d7c8b53 100644 --- a/posthog/hogql/database/test/__snapshots__/test_database.ambr +++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr @@ -478,6 +478,40 @@ "schema_valid": true, "table": "person_distinct_id_overrides", "type": "lazy_table" + }, + "event_issue_id": { + "chain": null, + "fields": null, + "hogql_value": "event_issue_id", + "id": null, + "name": "event_issue_id", + "schema_valid": true, + "table": null, + "type": "expression" + }, + "exception_issue_override": { + "chain": null, + "fields": [ + "team_id", + "fingerprint", + "issue_id" + ], + "hogql_value": "exception_issue_override", + "id": "exception_issue_override", + "name": "exception_issue_override", + "schema_valid": true, + "table": "error_tracking_issue_fingerprint_overrides", + "type": "lazy_table" + }, + "issue_id": { + "chain": null, + "fields": null, + "hogql_value": "issue_id", + "id": null, + "name": "issue_id", + "schema_valid": true, + "table": null, + "type": "expression" } }, "id": "events", @@ -848,7 +882,10 @@ "elements_chain_ids", "elements_chain_elements", "event_person_id", - "override" + "override", + "event_issue_id", + "exception_issue_override", + "issue_id" ], "hogql_value": "events", "id": "events", @@ -1667,6 +1704,153 @@ "id": "heatmaps", "name": "heatmaps", "type": "posthog" + }, + "query_log": { + "fields": { + "query_id": { + "chain": null, + "fields": null, + "hogql_value": "query_id", + "id": null, + "name": "query_id", + "schema_valid": true, + "table": null, + "type": "string" + }, + "query": { + "chain": null, + "fields": null, + "hogql_value": "query", + "id": null, + "name": "query", + "schema_valid": true, + "table": null, + "type": "string" + }, + "query_start_time": { + "chain": null, + "fields": null, + "hogql_value": "query_start_time", + "id": null, + "name": "query_start_time", + "schema_valid": true, + "table": null, + "type": "datetime" + }, + "query_duration_ms": { + "chain": null, + "fields": null, + "hogql_value": "query_duration_ms", + "id": null, + "name": "query_duration_ms", + "schema_valid": true, + "table": null, + "type": "float" + }, + "created_by": { + "chain": null, + "fields": null, + "hogql_value": "created_by", + "id": null, + "name": "created_by", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "read_rows": { + "chain": null, + "fields": null, + "hogql_value": "read_rows", + "id": null, + "name": "read_rows", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "read_bytes": { + "chain": null, + "fields": null, + "hogql_value": "read_bytes", + "id": null, + "name": "read_bytes", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "result_rows": { + "chain": null, + "fields": null, + "hogql_value": "result_rows", + "id": null, + "name": "result_rows", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "result_bytes": { + "chain": null, + "fields": null, + "hogql_value": "result_bytes", + "id": null, + "name": "result_bytes", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "memory_usage": { + "chain": null, + "fields": null, + "hogql_value": "memory_usage", + "id": null, + "name": "memory_usage", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "status": { + "chain": null, + "fields": 
null, + "hogql_value": "status", + "id": null, + "name": "status", + "schema_valid": true, + "table": null, + "type": "string" + }, + "kind": { + "chain": null, + "fields": null, + "hogql_value": "kind", + "id": null, + "name": "kind", + "schema_valid": true, + "table": null, + "type": "string" + }, + "query_type": { + "chain": null, + "fields": null, + "hogql_value": "query_type", + "id": null, + "name": "query_type", + "schema_valid": true, + "table": null, + "type": "string" + }, + "is_personal_api_key_request": { + "chain": null, + "fields": null, + "hogql_value": "is_personal_api_key_request", + "id": null, + "name": "is_personal_api_key_request", + "schema_valid": true, + "table": null, + "type": "boolean" + } + }, + "id": "query_log", + "name": "query_log", + "type": "posthog" } } ''' @@ -2105,6 +2289,40 @@ "schema_valid": true, "table": null, "type": "array" + }, + "event_issue_id": { + "chain": null, + "fields": null, + "hogql_value": "event_issue_id", + "id": null, + "name": "event_issue_id", + "schema_valid": true, + "table": null, + "type": "expression" + }, + "exception_issue_override": { + "chain": null, + "fields": [ + "team_id", + "fingerprint", + "issue_id" + ], + "hogql_value": "exception_issue_override", + "id": "exception_issue_override", + "name": "exception_issue_override", + "schema_valid": true, + "table": "error_tracking_issue_fingerprint_overrides", + "type": "lazy_table" + }, + "issue_id": { + "chain": null, + "fields": null, + "hogql_value": "issue_id", + "id": null, + "name": "issue_id", + "schema_valid": true, + "table": null, + "type": "expression" } }, "id": "events", @@ -2473,7 +2691,10 @@ "elements_chain_href", "elements_chain_texts", "elements_chain_ids", - "elements_chain_elements" + "elements_chain_elements", + "event_issue_id", + "exception_issue_override", + "issue_id" ], "hogql_value": "events", "id": "events", @@ -3292,6 +3513,153 @@ "id": "heatmaps", "name": "heatmaps", "type": "posthog" + }, + "query_log": { + "fields": { + "query_id": { + "chain": null, + "fields": null, + "hogql_value": "query_id", + "id": null, + "name": "query_id", + "schema_valid": true, + "table": null, + "type": "string" + }, + "query": { + "chain": null, + "fields": null, + "hogql_value": "query", + "id": null, + "name": "query", + "schema_valid": true, + "table": null, + "type": "string" + }, + "query_start_time": { + "chain": null, + "fields": null, + "hogql_value": "query_start_time", + "id": null, + "name": "query_start_time", + "schema_valid": true, + "table": null, + "type": "datetime" + }, + "query_duration_ms": { + "chain": null, + "fields": null, + "hogql_value": "query_duration_ms", + "id": null, + "name": "query_duration_ms", + "schema_valid": true, + "table": null, + "type": "float" + }, + "created_by": { + "chain": null, + "fields": null, + "hogql_value": "created_by", + "id": null, + "name": "created_by", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "read_rows": { + "chain": null, + "fields": null, + "hogql_value": "read_rows", + "id": null, + "name": "read_rows", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "read_bytes": { + "chain": null, + "fields": null, + "hogql_value": "read_bytes", + "id": null, + "name": "read_bytes", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "result_rows": { + "chain": null, + "fields": null, + "hogql_value": "result_rows", + "id": null, + "name": "result_rows", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "result_bytes": { + "chain": null, 
+ "fields": null, + "hogql_value": "result_bytes", + "id": null, + "name": "result_bytes", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "memory_usage": { + "chain": null, + "fields": null, + "hogql_value": "memory_usage", + "id": null, + "name": "memory_usage", + "schema_valid": true, + "table": null, + "type": "integer" + }, + "status": { + "chain": null, + "fields": null, + "hogql_value": "status", + "id": null, + "name": "status", + "schema_valid": true, + "table": null, + "type": "string" + }, + "kind": { + "chain": null, + "fields": null, + "hogql_value": "kind", + "id": null, + "name": "kind", + "schema_valid": true, + "table": null, + "type": "string" + }, + "query_type": { + "chain": null, + "fields": null, + "hogql_value": "query_type", + "id": null, + "name": "query_type", + "schema_valid": true, + "table": null, + "type": "string" + }, + "is_personal_api_key_request": { + "chain": null, + "fields": null, + "hogql_value": "is_personal_api_key_request", + "id": null, + "name": "is_personal_api_key_request", + "schema_valid": true, + "table": null, + "type": "boolean" + } + }, + "id": "query_log", + "name": "query_log", + "type": "posthog" } } ''' diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index a422e16dc989c7..43c9bb4e7c1506 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -398,7 +398,7 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy "_toUInt64": HogQLFunctionMeta("toUInt64", 1, 1, signatures=[((UnknownType(),), IntegerType())]), "_toUInt128": HogQLFunctionMeta("toUInt128", 1, 1), "toFloat": HogQLFunctionMeta("accurateCastOrNull", 1, 1, suffix_args=[ast.Constant(value="Float64")]), - "toDecimal": HogQLFunctionMeta("accurateCastOrNull", 1, 1, suffix_args=[ast.Constant(value="Decimal64")]), + "toDecimal": HogQLFunctionMeta("toDecimal64OrNull", 2, 2), "toDate": HogQLFunctionMeta( "toDateOrNull", 1, diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index 418e2f63548075..27a4d85a84c66f 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -18,6 +18,7 @@ MAX_SELECT_RETURNED_ROWS, HogQLGlobalSettings, ) +from posthog.hogql.database.schema.query_log import RawQueryLogTable from posthog.hogql.functions import ( ADD_OR_NULL_DATETIME_FUNCTIONS, FIRST_ARG_DATETIME_FUNCTIONS, @@ -494,7 +495,9 @@ def visit_join_expr(self, node: ast.JoinExpr) -> JoinExprResponse: else: sql = table_type.table.to_printed_hogql() - if isinstance(table_type.table, FunctionCallTable) and not isinstance(table_type.table, S3Table): + if isinstance(table_type.table, FunctionCallTable) and not ( + isinstance(table_type.table, S3Table) or isinstance(table_type.table, RawQueryLogTable) + ): if node.table_args is None: raise QueryError(f"Table function '{table_type.table.name}' requires arguments") @@ -1157,7 +1160,7 @@ def visit_call(self, node: ast.Call): args_part = f"({', '.join(args)})" return f"{relevant_clickhouse_name}{params_part}{args_part}" else: - return f"{node.name}({', '.join([self.visit(arg) for arg in node.args ])})" + return f"{node.name}({', '.join([self.visit(arg) for arg in node.args])})" elif func_meta := find_hogql_posthog_function(node.name): validate_function_args(node.args, func_meta.min_args, func_meta.max_args, node.name) args = [self.visit(arg) for arg in node.args] diff --git a/posthog/hogql/test/__snapshots__/test_resolver.ambr b/posthog/hogql/test/__snapshots__/test_resolver.ambr index cfc878e6004b8a..a9f25124489a28 
100644 --- a/posthog/hogql/test/__snapshots__/test_resolver.ambr +++ b/posthog/hogql/test/__snapshots__/test_resolver.ambr @@ -37,7 +37,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -48,6 +50,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -161,6 +164,22 @@ table_type: } }, + event_issue_id: { + alias: "event_issue_id" + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + }, event_person_id: { alias: "event_person_id" type: { @@ -168,6 +187,30 @@ table_type: } }, + issue_id: { + alias: "issue_id" + type: { + arg_types: [ + { + data_type: "unknown" + nullable: True + }, + { + data_type: "str" + nullable: False + }, + { + data_type: "unknown" + nullable: True + } + ] + name: "if" + return_type: { + data_type: "unknown" + nullable: True + } + } + }, person_id: { alias: "person_id" type: { @@ -549,6 +592,40 @@ alias: "event_person_id" type: } + }, + { + alias: "event_issue_id" + expr: { + chain: [ + "event_issue_id" + ] + type: { + name: "event_issue_id" + table_type: + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + }, + { + alias: "issue_id" + expr: { + chain: [ + "issue_id" + ] + type: { + name: "issue_id" + table_type: + } + } + hidden: True + type: { + alias: "issue_id" + type: + } } ] select_from: { @@ -895,6 +972,210 @@ } hidden: True type: + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: + } + hidden: True + type: + }, + { + alias: "issue_id" + expr: { + args: [ + { + args: [ + { + args: [ + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + } + ] + distinct: False + name: "empty" + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "empty" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + ] + distinct: False + name: "not" + type: { + arg_types: [ + + ] + name: "not" + return_type: + } + }, + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: 
+ } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + } + ] + distinct: False + name: "if" + type: + } + hidden: True + type: } ] select_from: { @@ -931,7 +1212,9 @@ elements_chain_ids: , elements_chain_texts: , event: , + event_issue_id: , event_person_id: , + issue_id: , person_id: , properties: , timestamp: , @@ -973,7 +1256,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -985,6 +1270,7 @@ group_3: {}, group_4: {}, hidden_field: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -1467,44 +1753,288 @@ alias: "event_person_id" type: } - } - ] - select_from: { - table: { - chain: [ - "events" - ] - type: - } - type: - } - type: { - aliases: {} - anonymous_tables: [] - columns: { - $group_0: , - $group_1: , - $group_2: , - $group_3: , - $group_4: , - $session_id: , - $window_id: , - created_at: , - distinct_id: , - elements_chain: , - elements_chain_elements: , - elements_chain_href: , - elements_chain_ids: , - elements_chain_texts: , - event: , - event_person_id: , - person_id: , - properties: , - timestamp: , - uuid: - } - ctes: {} - tables: { + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + }, + { + alias: "issue_id" + expr: { + args: [ + { + args: [ + { + args: [ + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + } + ] + distinct: False + name: "empty" + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "empty" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + ] + distinct: False + name: "not" + type: { + arg_types: [ + + ] + name: "not" + return_type: { + data_type: "unknown" + nullable: True + } + } + }, + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + 
"$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + } + ] + distinct: False + name: "if" + type: { + arg_types: [ + , + { + data_type: "str" + nullable: False + }, + + ] + name: "if" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + $group_0: , + $group_1: , + $group_2: , + $group_3: , + $group_4: , + $session_id: , + $window_id: , + created_at: , + distinct_id: , + elements_chain: , + elements_chain_elements: , + elements_chain_href: , + elements_chain_ids: , + elements_chain_texts: , + event: , + event_issue_id: , + event_person_id: , + issue_id: , + person_id: , + properties: , + timestamp: , + uuid: + } + ctes: {} + tables: { events: } } @@ -2224,7 +2754,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -2235,6 +2767,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -2348,6 +2881,22 @@ table_type: } }, + event_issue_id: { + alias: "event_issue_id" + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + }, event_person_id: { alias: "event_person_id" type: { @@ -2355,8 +2904,8 @@ table_type: } }, - person_id: { - alias: "person_id" + issue_id: { + alias: "issue_id" type: { arg_types: [ { @@ -2368,8 +2917,8 @@ nullable: False }, { - data_type: "str" - nullable: False + data_type: "unknown" + nullable: True } ] name: "if" @@ -2379,17 +2928,41 @@ } } }, - properties: { - alias: "properties" - type: { - name: "properties" - table_type: - } - }, - timestamp: { - alias: "timestamp" + person_id: { + alias: "person_id" type: { - name: "timestamp" + arg_types: [ + { + data_type: "unknown" + nullable: True + }, + { + data_type: "str" + nullable: False + }, + { + data_type: "str" + nullable: False + } + ] + name: "if" + return_type: { + data_type: "unknown" + nullable: True + } + } + }, + properties: { + alias: "properties" + type: { + name: "properties" + table_type: + } + }, + timestamp: { + alias: "timestamp" + type: { + name: "timestamp" table_type: } }, @@ -2736,6 +3309,40 @@ alias: "event_person_id" type: } + }, + { + alias: "event_issue_id" + expr: { + chain: [ + "event_issue_id" + ] + type: { + name: "event_issue_id" + table_type: + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + }, + { + alias: "issue_id" + expr: { + chain: [ + "issue_id" + ] + type: { + name: "issue_id" + table_type: + } + } + hidden: True + type: { + alias: "issue_id" + type: + } } ] select_from: { @@ -3083,6 +3690,210 @@ } hidden: True type: + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + 
"$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: + } + hidden: True + type: + }, + { + alias: "issue_id" + expr: { + args: [ + { + args: [ + { + args: [ + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + } + ] + distinct: False + name: "empty" + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "empty" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + ] + distinct: False + name: "not" + type: { + arg_types: [ + + ] + name: "not" + return_type: + } + }, + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + } + ] + distinct: False + name: "if" + type: + } + hidden: True + type: } ] select_from: { @@ -3126,7 +3937,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -3137,6 +3950,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -3619,48 +4433,292 @@ alias: "event_person_id" type: } - } - ] - select_from: { - table: { - chain: [ - "events" - ] - type: - } - type: - } - type: { - aliases: {} - anonymous_tables: [] - columns: { - $group_0: , - $group_1: , - $group_2: , - $group_3: , - $group_4: , - $session_id: , - $window_id: , - created_at: , - distinct_id: , - elements_chain: , - elements_chain_elements: , - elements_chain_href: , - elements_chain_ids: , - elements_chain_texts: , - event: , - event_person_id: , - person_id: , - properties: , - timestamp: , - uuid: - } - ctes: {} - tables: { - events: - } - } - } + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + 
data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + }, + { + alias: "issue_id" + expr: { + args: [ + { + args: [ + { + args: [ + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + } + ] + distinct: False + name: "empty" + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "empty" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + ] + distinct: False + name: "not" + type: { + arg_types: [ + + ] + name: "not" + return_type: { + data_type: "unknown" + nullable: True + } + } + }, + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + } + ] + distinct: False + name: "if" + type: { + arg_types: [ + , + { + data_type: "str" + nullable: False + }, + + ] + name: "if" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + $group_0: , + $group_1: , + $group_2: , + $group_3: , + $group_4: , + $session_id: , + $window_id: , + created_at: , + distinct_id: , + elements_chain: , + elements_chain_elements: , + elements_chain_href: , + elements_chain_ids: , + elements_chain_texts: , + event: , + event_issue_id: , + event_person_id: , + issue_id: , + person_id: , + properties: , + timestamp: , + uuid: + } + ctes: {} + tables: { + events: + } + } + } set_operator: "UNION ALL" } ] @@ -3694,7 +4752,9 @@ elements_chain_ids: , elements_chain_texts: , event: , + event_issue_id: , event_person_id: , + issue_id: , person_id: , properties: , timestamp: , @@ -3950,7 +5010,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -3961,6 +5023,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -4443,42 +5506,286 @@ alias: "event_person_id" type: } - } - ] - select_from: { - 
table: { - chain: [ - "events" - ] - type: - } - type: - } - type: { - aliases: {} - anonymous_tables: [] - columns: { - $group_0: , - $group_1: , - $group_2: , - $group_3: , - $group_4: , - $session_id: , - $window_id: , - created_at: , - distinct_id: , - elements_chain: , - elements_chain_elements: , - elements_chain_href: , - elements_chain_ids: , - elements_chain_texts: , - event: , - event_person_id: , - person_id: , - properties: , - timestamp: , - uuid: - } + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + }, + { + alias: "issue_id" + expr: { + args: [ + { + args: [ + { + args: [ + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + } + ] + distinct: False + name: "empty" + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "empty" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + ] + distinct: False + name: "not" + type: { + arg_types: [ + + ] + name: "not" + return_type: { + data_type: "unknown" + nullable: True + } + } + }, + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + } + ] + distinct: False + name: "if" + type: { + arg_types: [ + , + { + data_type: "str" + nullable: False + }, + + ] + name: "if" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + } + ] + select_from: { + table: { + chain: [ + "events" + ] + type: + } + type: + } + type: { + aliases: {} + anonymous_tables: [] + columns: { + $group_0: , + $group_1: , + $group_2: , + $group_3: , + $group_4: , + $session_id: , + 
$window_id: , + created_at: , + distinct_id: , + elements_chain: , + elements_chain_elements: , + elements_chain_href: , + elements_chain_ids: , + elements_chain_texts: , + event: , + event_issue_id: , + event_person_id: , + issue_id: , + person_id: , + properties: , + timestamp: , + uuid: + } ctes: {} tables: { events: @@ -4519,7 +5826,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -4530,6 +5839,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -5013,6 +6323,248 @@ alias: "event_person_id" type: } + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + }, + { + alias: "issue_id" + expr: { + args: [ + { + args: [ + { + args: [ + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + } + ] + distinct: False + name: "empty" + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "empty" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + ] + distinct: False + name: "not" + type: { + arg_types: [ + + ] + name: "not" + return_type: { + data_type: "unknown" + nullable: True + } + } + }, + { + alias: "issue_id" + expr: { + chain: [ + "exception_issue_override", + "issue_id" + ] + type: { + name: "issue_id" + table_type: { + field: "exception_issue_override" + lazy_join: { + from_field: [ + "fingerprint" + ], + join_function: , + join_table: { + fields: { + fingerprint: {}, + issue_id: {}, + team_id: {} + } + }, + to_field: None + } + table_type: + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } + }, + { + alias: "event_issue_id" + expr: { + args: [ + { + alias: "$exception_issue_id" + expr: { + chain: [ + "properties", + "$exception_issue_id" + ] + end: 37 + start: 7 + type: { + chain: [ + "$exception_issue_id" + ] + field_type: { + name: "properties" + table_type: + } + } + } + hidden: True + type: { + alias: "$exception_issue_id" + type: + } + } + ] + distinct: False + end: 38 + name: "toUUID" + start: 0 + type: { + arg_types: [ + { + data_type: "str" + nullable: False + } + ] + name: "toUUID" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "event_issue_id" + type: + } + } + ] + distinct: False + name: "if" + type: { + arg_types: [ + , + { + data_type: "str" + nullable: False + }, + + ] + name: "if" + return_type: { + data_type: "unknown" + nullable: True + } + } + } + hidden: True + type: { + alias: "issue_id" + type: + } } ] select_from: { @@ -5044,7 
+6596,9 @@ elements_chain_ids: , elements_chain_texts: , event: , + event_issue_id: , event_person_id: , + issue_id: , person_id: , properties: , timestamp: , @@ -5090,7 +6644,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -5101,6 +6657,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -5195,7 +6752,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -5206,6 +6765,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -5580,7 +7140,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -5591,6 +7153,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -5719,7 +7282,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -5730,6 +7295,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -5862,7 +7428,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -5873,6 +7441,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -6041,7 +7610,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -6052,6 +7623,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -6225,7 +7797,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -6236,6 +7810,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -6362,7 +7937,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -6373,6 +7950,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -6500,7 +8078,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -6511,6 +8091,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -6625,7 +8206,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -6636,6 +8219,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -6836,7 +8420,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -6847,6 +8433,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -6938,7 +8525,9 @@ elements_chain_ids: {}, 
elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -6949,6 +8538,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, @@ -7052,7 +8642,9 @@ elements_chain_ids: {}, elements_chain_texts: {}, event: {}, + event_issue_id: {}, event_person_id: {}, + exception_issue_override: {}, goe_0: {}, goe_1: {}, goe_2: {}, @@ -7063,6 +8655,7 @@ group_2: {}, group_3: {}, group_4: {}, + issue_id: {}, override: {}, pdi: {}, person: {}, diff --git a/posthog/hogql/test/test_printer.py b/posthog/hogql/test/test_printer.py index 4f2422263d0c84..dc5a1b3831e466 100644 --- a/posthog/hogql/test/test_printer.py +++ b/posthog/hogql/test/test_printer.py @@ -846,9 +846,7 @@ def test_functions(self): self._expr("toUUID('470f9b15-ff43-402a-af9f-2ed7c526a6cf')", context), "accurateCastOrNull(%(hogql_val_4)s, %(hogql_val_5)s)", ) - self.assertEqual( - self._expr("toDecimal('3.14')", context), "accurateCastOrNull(%(hogql_val_6)s, %(hogql_val_7)s)" - ) + self.assertEqual(self._expr("toDecimal('3.14', 2)", context), "toDecimal64OrNull(%(hogql_val_6)s, 2)") self.assertEqual(self._expr("quantile(0.95)( event )"), "quantile(0.95)(events.event)") def test_expr_parse_errors(self): diff --git a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr index cf6c7c1e02e6b5..ce7faf76120569 100644 --- a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr +++ b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr @@ -42,7 +42,7 @@ FROM events LEFT JOIN ( SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id FROM static_cohort_people - WHERE in(cohort_id, [6])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) + WHERE in(cohort_id, [8])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) WHERE and(1, equals(__in_cohort.matched, 1)) LIMIT 100 ''' @@ -66,7 +66,7 @@ FROM events LEFT JOIN ( SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id FROM static_cohort_people - WHERE in(cohort_id, [7])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) + WHERE in(cohort_id, [9])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) WHERE and(1, equals(__in_cohort.matched, 1)) LIMIT 100 ''' diff --git a/posthog/hogql_queries/error_tracking_query_runner.py b/posthog/hogql_queries/error_tracking_query_runner.py index c6c2231d366b17..88f66050e52e76 100644 --- a/posthog/hogql_queries/error_tracking_query_runner.py +++ b/posthog/hogql_queries/error_tracking_query_runner.py @@ -38,7 +38,7 @@ def to_query(self) -> ast.SelectQuery: select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), where=self.where(), order_by=self.order_by, - group_by=[ast.Field(chain=["properties", "$exception_issue_id"])], + group_by=[ast.Field(chain=["issue_id"])], ) def select(self): @@ -54,7 +54,7 @@ def select(self): ), ast.Alias(alias="last_seen", expr=ast.Call(name="max", args=[ast.Field(chain=["timestamp"])])), ast.Alias(alias="first_seen", expr=ast.Call(name="min", args=[ast.Field(chain=["timestamp"])])), - ast.Alias(alias="id", expr=ast.Field(chain=["properties", "$exception_issue_id"])), + ast.Alias(alias="id", expr=ast.Field(chain=["issue_id"])), ] if self.query.select: @@ -71,7 +71,7 @@ def where(self): ), ast.Call( name="isNotNull", - args=[ast.Field(chain=["properties", "$exception_issue_id"])], + args=[ast.Field(chain=["issue_id"])], ), 
ast.Placeholder(expr=ast.Field(chain=["filters"])), ] @@ -80,7 +80,7 @@ def where(self): exprs.append( ast.CompareOperation( op=ast.CompareOperationOp.Eq, - left=ast.Field(chain=["properties", "$exception_issue_id"]), + left=ast.Field(chain=["issue_id"]), right=ast.Constant(value=self.query.issueId), ) ) @@ -170,7 +170,7 @@ def results(self, columns: list[str], query_results: list): for result_dict in mapped_results: issue = issues.get(result_dict["id"]) if issue: - results.append(issue | result_dict | {"assignee": self.query.assignee}) + results.append(issue | result_dict | {"assignee": self.query.assignee, "id": str(result_dict["id"])}) else: logger.error( "error tracking issue not found", @@ -210,7 +210,7 @@ def error_tracking_issues(self, ids): else queryset ) issues = queryset.values("id", "status", "name", "description") - return {str(item["id"]): item for item in issues} + return {item["id"]: item for item in issues} def search_tokenizer(query: str) -> list[str]: diff --git a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py index 65d4fd179d1ec6..3bc9888021158c 100644 --- a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py +++ b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py @@ -9,6 +9,11 @@ calculate_credible_intervals, calculate_probabilities, ) +from posthog.hogql_queries.experiments.trends_statistics_v2 import ( + are_results_significant_v2, + calculate_credible_intervals_v2, + calculate_probabilities_v2, +) from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner from posthog.hogql_queries.query_runner import QueryRunner from posthog.models.experiment import Experiment @@ -56,6 +61,8 @@ def __init__(self, *args, **kwargs): self.variants.append(f"holdout-{self.experiment.holdout.id}") self.breakdown_key = f"$feature/{self.feature_flag.key}" + self.stats_version = self.query.stats_version or 1 + self.prepared_count_query = self._prepare_count_query() self.prepared_exposure_query = self._prepare_exposure_query() @@ -171,9 +178,10 @@ def _prepare_exposure_query(self) -> TrendsQuery: # 1. If math aggregation is used, we construct an implicit exposure query: unique users for the count event uses_math_aggregation = self._uses_math_aggregation_by_user_or_property_value(self.query.count_query) + prepared_count_query = TrendsQuery(**self.query.count_query.model_dump()) if uses_math_aggregation: - prepared_exposure_query = TrendsQuery(**self.query.count_query.model_dump()) + prepared_exposure_query = prepared_count_query prepared_exposure_query.dateRange = self._get_insight_date_range() prepared_exposure_query.trendsFilter = TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH_CUMULATIVE) @@ -219,7 +227,7 @@ def _prepare_exposure_query(self) -> TrendsQuery: raise ValueError("Expected first series item to have an 'event' attribute") # 2. 
Otherwise, if an exposure query is provided, we use it as is, adapting the date range and breakdown - elif self.query.exposure_query: + elif self.query.exposure_query and not self._is_data_warehouse_query(prepared_count_query): prepared_exposure_query = TrendsQuery(**self.query.exposure_query.model_dump()) prepared_exposure_query.dateRange = self._get_insight_date_range() prepared_exposure_query.trendsFilter = TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH_CUMULATIVE) @@ -307,9 +315,14 @@ def run(query_runner: TrendsQueryRunner, result_key: str, is_parallel: bool): # Statistical analysis control_variant, test_variants = self._get_variants_with_base_stats(count_result, exposure_result) - probabilities = calculate_probabilities(control_variant, test_variants) - significance_code, p_value = are_results_significant(control_variant, test_variants, probabilities) - credible_intervals = calculate_credible_intervals([control_variant, *test_variants]) + if self.stats_version == 2: + probabilities = calculate_probabilities_v2(control_variant, test_variants) + significance_code, p_value = are_results_significant_v2(control_variant, test_variants, probabilities) + credible_intervals = calculate_credible_intervals_v2([control_variant, *test_variants]) + else: + probabilities = calculate_probabilities(control_variant, test_variants) + significance_code, p_value = are_results_significant(control_variant, test_variants, probabilities) + credible_intervals = calculate_credible_intervals([control_variant, *test_variants]) return ExperimentTrendsQueryResponse( kind="ExperimentTrendsQuery", @@ -323,6 +336,7 @@ def run(query_runner: TrendsQueryRunner, result_key: str, is_parallel: bool): }, significant=significance_code == ExperimentSignificanceCode.SIGNIFICANT, significance_code=significance_code, + stats_version=self.stats_version, p_value=p_value, credible_intervals=credible_intervals, ) diff --git a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py index c8596256254d58..bba1366c334187 100644 --- a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py +++ b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py @@ -17,7 +17,7 @@ OBJECT_STORAGE_SECRET_ACCESS_KEY, XDIST_SUFFIX, ) -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, flush_persons_and_events +from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events from freezegun import freeze_time from typing import cast from django.utils import timezone @@ -36,6 +36,7 @@ from posthog.warehouse.models.credential import DataWarehouseCredential from posthog.warehouse.models.join import DataWarehouseJoin from posthog.warehouse.models.table import DataWarehouseTable +from posthog.hogql.query import execute_hogql_query TEST_BUCKET = "test_storage_bucket-posthog.hogql.datawarehouse.trendquery" + XDIST_SUFFIX @@ -182,6 +183,80 @@ def create_data_warehouse_table_with_payments(self): ) return table_name + def create_data_warehouse_table_with_usage(self): + if not OBJECT_STORAGE_ACCESS_KEY_ID or not OBJECT_STORAGE_SECRET_ACCESS_KEY: + raise Exception("Missing vars") + + fs = s3fs.S3FileSystem( + client_kwargs={ + "region_name": "us-east-1", + "endpoint_url": OBJECT_STORAGE_ENDPOINT, + "aws_access_key_id": OBJECT_STORAGE_ACCESS_KEY_ID, + "aws_secret_access_key": OBJECT_STORAGE_SECRET_ACCESS_KEY, + }, + ) + + 
path_to_s3_object = "s3://" + OBJECT_STORAGE_BUCKET + f"/{TEST_BUCKET}" + + id = pa.array(["1", "2", "3", "4", "5", "6"]) + date = pa.array(["2023-01-01", "2023-01-02", "2023-01-03", "2023-01-04", "2023-01-06", "2023-01-07"]) + user_id = pa.array( + ["user_control_0", "user_test_1", "user_test_2", "internal_test_1", "user_test_3", "user_extra"] + ) + usage = pa.array([1000, 500, 750, 100000, 800, 900]) + names = ["id", "ds", "userid", "usage"] + + pq.write_to_dataset( + pa.Table.from_arrays([id, date, user_id, usage], names=names), + path_to_s3_object, + filesystem=fs, + use_dictionary=True, + compression="snappy", + version="2.0", + ) + + table_name = "usage" + + credential = DataWarehouseCredential.objects.create( + access_key=OBJECT_STORAGE_ACCESS_KEY_ID, + access_secret=OBJECT_STORAGE_SECRET_ACCESS_KEY, + team=self.team, + ) + + DataWarehouseTable.objects.create( + name=table_name, + url_pattern=f"http://host.docker.internal:19000/{OBJECT_STORAGE_BUCKET}/{TEST_BUCKET}/*.parquet", + format=DataWarehouseTable.TableFormat.Parquet, + team=self.team, + columns={ + "id": "String", + "ds": "Date", + "userid": "String", + "usage": "Int64", + }, + credential=credential, + ) + + DataWarehouseJoin.objects.create( + team=self.team, + source_table_name=table_name, + source_table_key="userid", + joining_table_name="events", + joining_table_key="properties.$user_id", + field_name="events", + configuration={"experiments_optimized": True, "experiments_timestamp_key": "ds"}, + ) + + DataWarehouseJoin.objects.create( + team=self.team, + source_table_name=table_name, + source_table_key="userid", + joining_table_name="persons", + joining_table_key="properties.$user_id", + field_name="person", + ) + return table_name + @freeze_time("2020-01-01T12:00:00Z") def test_query_runner(self): feature_flag = self.create_feature_flag() @@ -218,7 +293,11 @@ def test_query_runner(self): team=self.team, event="$feature_flag_called", distinct_id=f"user_{variant}_{i}", - properties={feature_flag_property: variant}, + properties={ + "$feature_flag_response": variant, + feature_flag_property: variant, + "$feature_flag": feature_flag.key, + }, ) flush_persons_and_events() @@ -466,7 +545,11 @@ def test_query_runner_with_holdout(self): team=self.team, event="$feature_flag_called", distinct_id=f"user_{variant}_{i}", - properties={feature_flag_property: variant}, + properties={ + "$feature_flag_response": variant, + feature_flag_property: variant, + "$feature_flag": feature_flag.key, + }, ) flush_persons_and_events() @@ -532,7 +615,11 @@ def test_query_runner_with_data_warehouse_series_total_count(self): team=self.team, event="$feature_flag_called", distinct_id=f"user_{variant}_{i}", - properties={feature_flag_property: variant}, + properties={ + "$feature_flag_response": variant, + feature_flag_property: variant, + "$feature_flag": feature_flag.key, + }, timestamp=datetime(2023, 1, i + 1), ) @@ -546,7 +633,11 @@ def test_query_runner_with_data_warehouse_series_total_count(self): team=self.team, event="$feature_flag_called", distinct_id="user_test_3", - properties={feature_flag_property: "control"}, + properties={ + feature_flag_property: "control", + "$feature_flag_response": "control", + "$feature_flag": feature_flag.key, + }, timestamp=datetime(2023, 1, 3), ) _create_event( @@ -560,7 +651,11 @@ def test_query_runner_with_data_warehouse_series_total_count(self): team=self.team, event="$feature_flag_called", distinct_id="user_test_3", - properties={feature_flag_property: "control"}, + properties={ + feature_flag_property: 
"control", + "$feature_flag_response": "control", + "$feature_flag": feature_flag.key, + }, timestamp=datetime(2023, 1, 9), ) @@ -629,7 +724,11 @@ def test_query_runner_with_data_warehouse_series_avg_amount(self): team=self.team, event="$feature_flag_called", distinct_id=f"user_{variant}_{i}", - properties={feature_flag_property: variant}, + properties={ + "$feature_flag_response": variant, + feature_flag_property: variant, + "$feature_flag": feature_flag.key, + }, timestamp=datetime(2023, 1, i + 1), ) @@ -643,21 +742,33 @@ def test_query_runner_with_data_warehouse_series_avg_amount(self): team=self.team, event="$feature_flag_called", distinct_id="user_test_3", - properties={feature_flag_property: "control"}, + properties={ + "$feature_flag_response": "control", + feature_flag_property: "control", + "$feature_flag": feature_flag.key, + }, timestamp=datetime(2023, 1, 3), ) _create_event( team=self.team, event="Some other event", distinct_id="user_test_3", - properties={feature_flag_property: "control"}, + properties={ + "$feature_flag_response": "control", + feature_flag_property: "control", + "$feature_flag": feature_flag.key, + }, timestamp=datetime(2023, 1, 5), ) _create_event( team=self.team, event="$feature_flag_called", distinct_id="user_test_3", - properties={feature_flag_property: "control"}, + properties={ + "$feature_flag_response": "control", + feature_flag_property: "control", + "$feature_flag": feature_flag.key, + }, timestamp=datetime(2023, 1, 9), ) @@ -693,6 +804,329 @@ def test_query_runner_with_data_warehouse_series_avg_amount(self): [0.0, 50.0, 125.0, 125.0, 125.0, 205.0, 205.0, 205.0, 205.0, 205.0], ) + def test_query_runner_with_data_warehouse_series_no_end_date_and_nested_id(self): + table_name = self.create_data_warehouse_table_with_usage() + + feature_flag = self.create_feature_flag() + experiment = self.create_experiment( + feature_flag=feature_flag, + start_date=datetime(2023, 1, 1), + ) + + feature_flag_property = f"$feature/{feature_flag.key}" + + self.team.test_account_filters = [ + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } + ] + self.team.save() + count_query = TrendsQuery( + series=[ + DataWarehouseNode( + id=table_name, + distinct_id_field="userid", + id_field="id", + table_name=table_name, + timestamp_field="ds", + math="avg", + math_property="usage", + math_property_type="data_warehouse_properties", + ) + ], + filterTestAccounts=True, + ) + exposure_query = TrendsQuery(series=[EventsNode(event="$feature_flag_called")], filterTestAccounts=True) + + experiment_query = ExperimentTrendsQuery( + experiment_id=experiment.id, + kind="ExperimentTrendsQuery", + count_query=count_query, + exposure_query=exposure_query, + ) + + experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}] + experiment.save() + + # Populate exposure events + for variant, count in [("control", 7), ("test", 9)]: + for i in range(count): + _create_event( + team=self.team, + event="$feature_flag_called", + distinct_id=f"distinct_{variant}_{i}", + properties={ + "$feature_flag_response": variant, + feature_flag_property: variant, + "$feature_flag": feature_flag.key, + "$user_id": f"user_{variant}_{i}", + }, + timestamp=datetime(2023, 1, i + 1), + ) + + _create_person( + team=self.team, + distinct_ids=["internal_test_1"], + properties={"email": "internal_test_1@posthog.com", "$user_id": "internal_test_1"}, + ) + + _create_event( + team=self.team, + event="$feature_flag_called", + distinct_id="internal_test_1", + 
properties={ + feature_flag_property: "test", + "$feature_flag_response": "test", + "$feature_flag": feature_flag.key, + "$user_id": "internal_test_1", + }, + timestamp=datetime(2023, 1, 3), + ) + + # "user_test_3" first exposure (feature_flag_property="control") is on 2023-01-03 + # "user_test_3" relevant exposure (feature_flag_property="test") is on 2023-01-04 + # "user_test_3" other event (feature_flag_property="control") is on 2023-01-05 + # "user_test_3" usage event is on 2023-01-06 + # "user_test_3" second exposure (feature_flag_property="control") is on 2023-01-09 + # "user_test_3" should fall into the "test" variant, not the "control" variant + _create_event( + team=self.team, + event="$feature_flag_called", + distinct_id="distinct_test_3", + properties={ + "$feature_flag_response": "control", + feature_flag_property: "control", + "$feature_flag": feature_flag.key, + "$user_id": "user_test_3", + }, + timestamp=datetime(2023, 1, 3), + ) + _create_event( + team=self.team, + event="Some other event", + distinct_id="distinct_test_3", + properties={ + "$feature_flag_response": "control", + feature_flag_property: "control", + "$feature_flag": feature_flag.key, + "$user_id": "user_test_3", + }, + timestamp=datetime(2023, 1, 5), + ) + _create_event( + team=self.team, + event="$feature_flag_called", + distinct_id="distinct_test_3", + properties={ + "$feature_flag_response": "control", + feature_flag_property: "control", + "$feature_flag": feature_flag.key, + "$user_id": "user_test_3", + }, + timestamp=datetime(2023, 1, 9), + ) + + flush_persons_and_events() + + query_runner = ExperimentTrendsQueryRunner( + query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team + ) + with freeze_time("2023-01-07"): + # Build and execute the query to get the ClickHouse SQL + queries = query_runner.count_query_runner.to_queries() + response = execute_hogql_query( + query_type="TrendsQuery", + query=queries[0], + team=query_runner.count_query_runner.team, + modifiers=query_runner.count_query_runner.modifiers, + limit_context=query_runner.count_query_runner.limit_context, + ) + + # Assert the expected join condition in the clickhouse SQL + expected_join_condition = f"and(equals(events.team_id, {query_runner.count_query_runner.team.id}), equals(event, %(hogql_val_12)s), greaterOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_13)s, 6, %(hogql_val_14)s))), lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_15)s, 6, %(hogql_val_16)s))))) AS e__events ON" + self.assertIn( + expected_join_condition, + str(response.clickhouse), + "Please check to make sure the timestamp statements are included in the ASOF LEFT JOIN select statement. 
This may also fail if the placeholder numbers have changed.", + ) + + result = query_runner.calculate() + + trend_result = cast(ExperimentTrendsQueryResponse, result) + + self.assertEqual(len(result.variants), 2) + + control_result = next(variant for variant in trend_result.variants if variant.key == "control") + test_result = next(variant for variant in trend_result.variants if variant.key == "test") + + control_insight = next(variant for variant in trend_result.insight if variant["breakdown_value"] == "control") + test_insight = next(variant for variant in trend_result.insight if variant["breakdown_value"] == "test") + + self.assertEqual(control_result.count, 1000) + self.assertEqual(test_result.count, 2050) + self.assertEqual(control_result.absolute_exposure, 1) + self.assertEqual(test_result.absolute_exposure, 3) + + self.assertEqual( + control_insight["data"][:10], + [1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0], + ) + self.assertEqual( + test_insight["data"][:10], + [0.0, 500.0, 1250.0, 1250.0, 1250.0, 2050.0, 2050.0, 2050.0, 2050.0, 2050.0], + ) + + # Run the query again with filter_test_accounts=False + # as a point of comparison to above + count_query = TrendsQuery( + series=[ + DataWarehouseNode( + id=table_name, + distinct_id_field="userid", + id_field="id", + table_name=table_name, + timestamp_field="ds", + math="avg", + math_property="usage", + math_property_type="data_warehouse_properties", + ) + ], + filterTestAccounts=False, + ) + exposure_query = TrendsQuery(series=[EventsNode(event="$feature_flag_called")], filterTestAccounts=False) + + experiment_query = ExperimentTrendsQuery( + experiment_id=experiment.id, + kind="ExperimentTrendsQuery", + count_query=count_query, + exposure_query=exposure_query, + ) + + experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}] + experiment.save() + + query_runner = ExperimentTrendsQueryRunner( + query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team + ) + with freeze_time("2023-01-07"): + result = query_runner.calculate() + + trend_result = cast(ExperimentTrendsQueryResponse, result) + + self.assertEqual(len(result.variants), 2) + + control_result = next(variant for variant in trend_result.variants if variant.key == "control") + test_result = next(variant for variant in trend_result.variants if variant.key == "test") + + control_insight = next(variant for variant in trend_result.insight if variant["breakdown_value"] == "control") + test_insight = next(variant for variant in trend_result.insight if variant["breakdown_value"] == "test") + + self.assertEqual(control_result.count, 1000) + self.assertEqual(test_result.count, 102050) + self.assertEqual(control_result.absolute_exposure, 1) + self.assertEqual(test_result.absolute_exposure, 4) + + self.assertEqual( + control_insight["data"][:10], + [1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0, 1000.0], + ) + self.assertEqual( + test_insight["data"][:10], + [0.0, 500.0, 1250.0, 101250.0, 101250.0, 102050.0, 102050.0, 102050.0, 102050.0, 102050.0], + ) + + def test_query_runner_with_data_warehouse_series_expected_query(self): + table_name = self.create_data_warehouse_table_with_payments() + + feature_flag = self.create_feature_flag() + experiment = self.create_experiment( + feature_flag=feature_flag, + start_date=datetime(2023, 1, 1), + end_date=datetime(2023, 1, 10), + ) + + feature_flag_property = f"$feature/{feature_flag.key}" + + count_query = TrendsQuery( + series=[ + 
DataWarehouseNode( + id=table_name, + distinct_id_field="dw_distinct_id", + id_field="id", + table_name=table_name, + timestamp_field="dw_timestamp", + math="total", + ) + ] + ) + exposure_query = TrendsQuery(series=[EventsNode(event="$feature_flag_called")]) + + experiment_query = ExperimentTrendsQuery( + experiment_id=experiment.id, + kind="ExperimentTrendsQuery", + count_query=count_query, + exposure_query=exposure_query, + ) + + experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}] + experiment.save() + + # Populate exposure events + for variant, count in [("control", 7), ("test", 9)]: + for i in range(count): + _create_event( + team=self.team, + event="$feature_flag_called", + distinct_id=f"user_{variant}_{i}", + properties={ + "$feature_flag_response": variant, + feature_flag_property: variant, + "$feature_flag": feature_flag.key, + }, + timestamp=datetime(2023, 1, i + 1), + ) + + flush_persons_and_events() + + query_runner = ExperimentTrendsQueryRunner( + query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team + ) + with freeze_time("2023-01-07"): + # Build and execute the query to get the ClickHouse SQL + queries = query_runner.count_query_runner.to_queries() + response = execute_hogql_query( + query_type="TrendsQuery", + query=queries[0], + team=query_runner.count_query_runner.team, + modifiers=query_runner.count_query_runner.modifiers, + limit_context=query_runner.count_query_runner.limit_context, + ) + + # Assert the expected join condition in the clickhouse SQL + expected_join_condition = f"and(equals(events.team_id, {query_runner.count_query_runner.team.id}), equals(event, %(hogql_val_7)s), greaterOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_8)s, 6, %(hogql_val_9)s))), lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull(%(hogql_val_10)s, 6, %(hogql_val_11)s))))) AS e__events ON" + self.assertIn( + expected_join_condition, + str(response.clickhouse), + "Please check to make sure the timestamp statements are included in the ASOF LEFT JOIN select statement. 
This may also fail if the placeholder numbers have changed.", + ) + + result = query_runner.calculate() + + trend_result = cast(ExperimentTrendsQueryResponse, result) + + self.assertEqual(len(result.variants), 2) + + control_result = next(variant for variant in trend_result.variants if variant.key == "control") + test_result = next(variant for variant in trend_result.variants if variant.key == "test") + + self.assertEqual(control_result.count, 1) + self.assertEqual(test_result.count, 3) + self.assertEqual(control_result.absolute_exposure, 7) + self.assertEqual(test_result.absolute_exposure, 9) + def test_query_runner_with_invalid_data_warehouse_table_name(self): # parquet file isn't created, so we'll get an error table_name = "invalid_table_name" @@ -772,7 +1206,11 @@ def test_query_runner_with_avg_math(self): team=self.team, event="$feature_flag_called", distinct_id=f"user_{variant}_{i}", - properties={feature_flag_property: variant}, + properties={ + "$feature_flag_response": variant, + feature_flag_property: variant, + "$feature_flag": feature_flag.key, + }, timestamp=datetime(2020, 1, i + 1), ) @@ -867,7 +1305,11 @@ def test_query_runner_standard_flow(self): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {ff_property: "control"}, + "properties": { + "$feature_flag_response": "control", + ff_property: "control", + "$feature_flag": feature_flag.key, + }, }, ], "user_control_2": [ @@ -875,19 +1317,39 @@ def test_query_runner_standard_flow(self): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {ff_property: "control"}, + "properties": { + "$feature_flag_response": "control", + ff_property: "control", + "$feature_flag": feature_flag.key, + }, }, ], "user_test_1": [ {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, {"event": "$pageview", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, {"event": "$pageview", "timestamp": "2020-01-04", "properties": {ff_property: "test"}}, - {"event": "$feature_flag_called", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + { + "event": "$feature_flag_called", + "timestamp": "2020-01-02", + "properties": { + "$feature_flag_response": "test", + ff_property: "test", + "$feature_flag": feature_flag.key, + }, + }, ], "user_test_2": [ {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, {"event": "$pageview", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, - {"event": "$feature_flag_called", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + { + "event": "$feature_flag_called", + "timestamp": "2020-01-02", + "properties": { + "$feature_flag_response": "test", + ff_property: "test", + "$feature_flag": feature_flag.key, + }, + }, ], }, self.team, @@ -898,6 +1360,7 @@ def test_query_runner_standard_flow(self): query_runner = ExperimentTrendsQueryRunner( query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team ) + self.assertEqual(query_runner.stats_version, 1) result = query_runner.calculate() self.assertEqual(len(result.variants), 2) @@ -936,6 +1399,129 @@ def test_query_runner_standard_flow(self): self.assertEqual(test_variant.count, 5.0) self.assertEqual(test_variant.exposure, 1.0) + @flaky(max_runs=10, min_passes=1) + @freeze_time("2020-01-01T12:00:00Z") + def test_query_runner_standard_flow_v2_stats(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + ff_property = 
f"$feature/{feature_flag.key}" + count_query = TrendsQuery(series=[EventsNode(event="$pageview")]) + exposure_query = TrendsQuery(series=[EventsNode(event="$feature_flag_called")]) + + experiment_query = ExperimentTrendsQuery( + experiment_id=experiment.id, + kind="ExperimentTrendsQuery", + count_query=count_query, + exposure_query=exposure_query, + stats_version=2, + ) + + experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}] + experiment.save() + + journeys_for( + { + "user_control_1": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}}, + {"event": "$pageview", "timestamp": "2020-01-03", "properties": {ff_property: "control"}}, + { + "event": "$feature_flag_called", + "timestamp": "2020-01-02", + "properties": { + "$feature_flag_response": "control", + ff_property: "control", + "$feature_flag": feature_flag.key, + }, + }, + ], + "user_control_2": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}}, + { + "event": "$feature_flag_called", + "timestamp": "2020-01-02", + "properties": { + "$feature_flag_response": "control", + ff_property: "control", + "$feature_flag": feature_flag.key, + }, + }, + ], + "user_test_1": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + {"event": "$pageview", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, + {"event": "$pageview", "timestamp": "2020-01-04", "properties": {ff_property: "test"}}, + { + "event": "$feature_flag_called", + "timestamp": "2020-01-02", + "properties": { + "$feature_flag_response": "test", + ff_property: "test", + "$feature_flag": feature_flag.key, + }, + }, + ], + "user_test_2": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + {"event": "$pageview", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, + { + "event": "$feature_flag_called", + "timestamp": "2020-01-02", + "properties": { + "$feature_flag_response": "test", + ff_property: "test", + "$feature_flag": feature_flag.key, + }, + }, + ], + }, + self.team, + ) + + flush_persons_and_events() + + query_runner = ExperimentTrendsQueryRunner( + query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team + ) + self.assertEqual(query_runner.stats_version, 2) + result = query_runner.calculate() + + self.assertEqual(len(result.variants), 2) + for variant in result.variants: + self.assertIn(variant.key, ["control", "test"]) + + control_variant = next(v for v in result.variants if v.key == "control") + test_variant = next(v for v in result.variants if v.key == "test") + + self.assertEqual(control_variant.count, 3) + self.assertEqual(test_variant.count, 5) + self.assertEqual(control_variant.absolute_exposure, 2) + self.assertEqual(test_variant.absolute_exposure, 2) + + self.assertAlmostEqual(result.credible_intervals["control"][0], 0.3633, places=3) + self.assertAlmostEqual(result.credible_intervals["control"][1], 2.9224, places=3) + self.assertAlmostEqual(result.credible_intervals["test"][0], 0.7339, places=3) + self.assertAlmostEqual(result.credible_intervals["test"][1], 3.8894, places=3) + + self.assertAlmostEqual(result.p_value, 1.0, places=3) + + self.assertAlmostEqual(result.probability["control"], 0.2549, places=2) + self.assertAlmostEqual(result.probability["test"], 0.7453, places=2) + + self.assertEqual(result.significance_code, ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE) + + self.assertFalse(result.significant) + + 
self.assertEqual(len(result.variants), 2) + + self.assertEqual(control_variant.absolute_exposure, 2.0) + self.assertEqual(control_variant.count, 3.0) + self.assertEqual(control_variant.exposure, 1.0) + + self.assertEqual(test_variant.absolute_exposure, 2.0) + self.assertEqual(test_variant.count, 5.0) + self.assertEqual(test_variant.exposure, 1.0) + @freeze_time("2020-01-01T12:00:00Z") def test_validate_event_variants_no_events(self): feature_flag = self.create_feature_flag() diff --git a/posthog/hogql_queries/experiments/test/test_trends_statistics.py b/posthog/hogql_queries/experiments/test/test_trends_statistics.py new file mode 100644 index 00000000000000..b2bea591bc83b2 --- /dev/null +++ b/posthog/hogql_queries/experiments/test/test_trends_statistics.py @@ -0,0 +1,295 @@ +from posthog.hogql_queries.experiments import MIN_PROBABILITY_FOR_SIGNIFICANCE +from posthog.schema import ExperimentVariantTrendsBaseStats, ExperimentSignificanceCode +from posthog.hogql_queries.experiments.trends_statistics_v2 import ( + calculate_probabilities_v2, + are_results_significant_v2, + calculate_credible_intervals_v2, +) +from posthog.hogql_queries.experiments.trends_statistics import ( + calculate_probabilities, + are_results_significant, + calculate_credible_intervals, +) +from posthog.test.base import APIBaseTest + + +def create_variant(key: str, count: int, exposure: int) -> ExperimentVariantTrendsBaseStats: + return ExperimentVariantTrendsBaseStats(key=key, count=count, exposure=exposure, absolute_exposure=exposure) + + +def create_variant_with_different_exposures( + key: str, + count: int, + exposure: float, # relative exposure + absolute_exposure: int, # absolute exposure +) -> ExperimentVariantTrendsBaseStats: + return ExperimentVariantTrendsBaseStats( + key=key, count=count, exposure=exposure, absolute_exposure=absolute_exposure + ) + + +class TestExperimentTrendsStatistics(APIBaseTest): + def run_test_for_both_implementations(self, test_fn): + """Run the same test for both implementations""" + # Run for original implementation + test_fn( + stats_version=1, + calculate_probabilities=calculate_probabilities, + are_results_significant=are_results_significant, + calculate_credible_intervals=calculate_credible_intervals, + ) + # Run for v2 implementation + test_fn( + stats_version=2, + calculate_probabilities=calculate_probabilities_v2, + are_results_significant=are_results_significant_v2, + calculate_credible_intervals=calculate_credible_intervals_v2, + ) + + def test_small_sample_two_variants_not_significant(self): + """Test with small sample size, two variants, no clear winner""" + + def run_test(stats_version, calculate_probabilities, are_results_significant, calculate_credible_intervals): + control = create_variant("control", count=10, exposure=100) + test = create_variant("test", count=11, exposure=100) + + probabilities = calculate_probabilities(control, [test]) + significance, p_value = are_results_significant(control, [test], probabilities) + intervals = calculate_credible_intervals([control, test]) + + self.assertEqual(len(probabilities), 2) + self.assertTrue(0.4 < probabilities[0] < 0.6) # Close to 50/50 + self.assertTrue(0.4 < probabilities[1] < 0.6) # Close to 50/50 + self.assertEqual(significance, ExperimentSignificanceCode.LOW_WIN_PROBABILITY) + self.assertEqual(p_value, 1) + + # Control: ~10% conversion rate with wide interval due to small sample + self.assertAlmostEqual(intervals["control"][0], 0.055, places=2) # Lower bound ~5.5% + self.assertAlmostEqual(intervals["control"][1], 
0.182, places=2) # Upper bound ~18.2% + + # Test: ~11% conversion rate with wide interval due to small sample + self.assertAlmostEqual(intervals["test"][0], 0.062, places=2) # Lower bound ~6.2% + self.assertAlmostEqual(intervals["test"][1], 0.195, places=2) # Upper bound ~19.5% + + self.run_test_for_both_implementations(run_test) + + def test_large_sample_two_variants_significant(self): + """Test with large sample size, two variants, clear winner""" + + def run_test(stats_version, calculate_probabilities, are_results_significant, calculate_credible_intervals): + control = create_variant("control", count=1000, exposure=10000) + test = create_variant("test", count=1200, exposure=10000) + + probabilities = calculate_probabilities(control, [test]) + significance, p_value = are_results_significant(control, [test], probabilities) + intervals = calculate_credible_intervals([control, test]) + + self.assertEqual(len(probabilities), 2) + self.assertTrue(probabilities[1] > 0.95) # Test variant strongly winning + self.assertTrue(probabilities[0] < 0.05) # Control variant strongly losing + self.assertEqual(significance, ExperimentSignificanceCode.SIGNIFICANT) + if stats_version == 2: + self.assertEqual(p_value, 0) + else: + self.assertLess(p_value, 0.001) + + # Control: 10% conversion rate with narrow interval due to large sample + self.assertAlmostEqual(intervals["control"][0], 0.094, places=2) # Lower bound ~9.4% + self.assertAlmostEqual(intervals["control"][1], 0.106, places=2) # Upper bound ~10.6% + + # Test: 12% conversion rate with narrow interval due to large sample + self.assertAlmostEqual(intervals["test"][0], 0.114, places=2) # Lower bound ~11.4% + self.assertAlmostEqual(intervals["test"][1], 0.126, places=2) # Upper bound ~12.6% + + self.run_test_for_both_implementations(run_test) + + def test_large_sample_two_variants_strongly_significant(self): + """Test with large sample size, two variants, very clear winner""" + + def run_test(stats_version, calculate_probabilities, are_results_significant, calculate_credible_intervals): + control = create_variant("control", count=1000, exposure=10000) + test = create_variant("test", count=1500, exposure=10000) + + probabilities = calculate_probabilities(control, [test]) + significance, p_value = are_results_significant(control, [test], probabilities) + intervals = calculate_credible_intervals([control, test]) + + self.assertEqual(len(probabilities), 2) + self.assertTrue(probabilities[1] > 0.99) # Test variant very strongly winning + self.assertTrue(probabilities[0] < 0.01) # Control variant very strongly losing + self.assertEqual(significance, ExperimentSignificanceCode.SIGNIFICANT) + if stats_version == 2: + self.assertEqual(p_value, 0) + else: + self.assertLess(p_value, 0.001) + + # Control: 10% conversion rate + self.assertAlmostEqual(intervals["control"][0], 0.094, places=2) # Lower bound ~9.4% + self.assertAlmostEqual(intervals["control"][1], 0.106, places=2) # Upper bound ~10.6% + + # Test: 15% conversion rate, clearly higher than control + self.assertAlmostEqual(intervals["test"][0], 0.143, places=2) # Lower bound ~14.3% + self.assertAlmostEqual(intervals["test"][1], 0.157, places=2) # Upper bound ~15.7% + + self.run_test_for_both_implementations(run_test) + + def test_many_variants_not_significant(self): + """Test with multiple variants, no clear winner""" + + def run_test(stats_version, calculate_probabilities, are_results_significant, calculate_credible_intervals): + control = create_variant("control", count=100, exposure=1000) + test_a = 
create_variant("test_a", count=98, exposure=1000) + test_b = create_variant("test_b", count=102, exposure=1000) + test_c = create_variant("test_c", count=101, exposure=1000) + + probabilities = calculate_probabilities(control, [test_a, test_b, test_c]) + significance, p_value = are_results_significant(control, [test_a, test_b, test_c], probabilities) + intervals = calculate_credible_intervals([control, test_a, test_b, test_c]) + + self.assertEqual(len(probabilities), 4) + self.assertTrue(all(p < MIN_PROBABILITY_FOR_SIGNIFICANCE for p in probabilities)) + self.assertEqual(significance, ExperimentSignificanceCode.LOW_WIN_PROBABILITY) + self.assertEqual(p_value, 1) + + # All variants around 10% with overlapping intervals + self.assertAlmostEqual(intervals["control"][0], 0.083, places=2) # ~8.3% + self.assertAlmostEqual(intervals["control"][1], 0.119, places=2) # ~11.9% + + self.assertAlmostEqual(intervals["test_a"][0], 0.081, places=2) # ~8.1% + self.assertAlmostEqual(intervals["test_a"][1], 0.117, places=2) # ~11.7% + + self.assertAlmostEqual(intervals["test_b"][0], 0.085, places=2) # ~8.5% + self.assertAlmostEqual(intervals["test_b"][1], 0.121, places=2) # ~12.1% + + self.assertAlmostEqual(intervals["test_c"][0], 0.084, places=2) # ~8.4% + self.assertAlmostEqual(intervals["test_c"][1], 0.120, places=2) # ~12.0% + + self.run_test_for_both_implementations(run_test) + + def test_many_variants_significant(self): + """Test with multiple variants, one clear winner""" + + def run_test(stats_version, calculate_probabilities, are_results_significant, calculate_credible_intervals): + control = create_variant("control", count=1000, exposure=10000) + test_a = create_variant("test_a", count=1050, exposure=10000) + test_b = create_variant("test_b", count=1500, exposure=10000) + test_c = create_variant("test_c", count=1100, exposure=10000) + + probabilities = calculate_probabilities(control, [test_a, test_b, test_c]) + significance, p_value = are_results_significant(control, [test_a, test_b, test_c], probabilities) + intervals = calculate_credible_intervals([control, test_a, test_b, test_c]) + + self.assertEqual(len(probabilities), 4) + self.assertTrue(probabilities[2] > 0.9) # test_b should be winning + self.assertTrue(probabilities[1] < 0.1) # test_a should be losing + self.assertTrue(probabilities[0] < 0.1) # control should be losing + self.assertEqual(significance, ExperimentSignificanceCode.SIGNIFICANT) + if stats_version == 2: + self.assertEqual(p_value, 0) + else: + self.assertLess(p_value, 0.001) + + # Control at 10% + self.assertAlmostEqual(intervals["control"][0], 0.094, places=2) + self.assertAlmostEqual(intervals["control"][1], 0.106, places=2) + + # Test A slightly higher at 10.5% + self.assertAlmostEqual(intervals["test_a"][0], 0.099, places=2) + self.assertAlmostEqual(intervals["test_a"][1], 0.111, places=2) + + # Test B clearly winning at 15% + self.assertAlmostEqual(intervals["test_b"][0], 0.143, places=2) + self.assertAlmostEqual(intervals["test_b"][1], 0.157, places=2) + + # Test C slightly higher at 11% + self.assertAlmostEqual(intervals["test_c"][0], 0.104, places=2) + self.assertAlmostEqual(intervals["test_c"][1], 0.116, places=2) + + self.run_test_for_both_implementations(run_test) + + def test_insufficient_sample_size(self): + """Test with sample size below threshold""" + + def run_test(stats_version, calculate_probabilities, are_results_significant, calculate_credible_intervals): + control = create_variant("control", count=5, exposure=50) + test = create_variant("test", count=8, 
exposure=50) + + probabilities = calculate_probabilities(control, [test]) + significance, p_value = are_results_significant(control, [test], probabilities) + intervals = calculate_credible_intervals([control, test]) + + self.assertEqual(len(probabilities), 2) + # Assert individual probabilities + self.assertTrue(probabilities[0] < 0.5) # Control has lower probability + self.assertTrue(probabilities[1] > 0.5) # Test has higher probability + self.assertEqual(significance, ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE) + self.assertEqual(p_value, 1.0) + + # Both variants should have wide intervals due to small sample size + self.assertAlmostEqual(intervals["control"][0], 0.044, places=2) # 4.4% + self.assertAlmostEqual(intervals["control"][1], 0.229, places=2) # 22.9% + + self.assertAlmostEqual(intervals["test"][0], 0.083, places=2) # 8.3% + if stats_version == 2: + self.assertAlmostEqual(intervals["test"][1], 0.309, places=2) # 30.9% + else: + self.assertAlmostEqual(intervals["test"][1], 0.315, places=2) # 31.5% + + self.run_test_for_both_implementations(run_test) + + def test_edge_cases(self): + """Test edge cases like zero counts""" + + def run_test(stats_version, calculate_probabilities, are_results_significant, calculate_credible_intervals): + control = create_variant("control", count=0, exposure=1000) + test = create_variant("test", count=0, exposure=1000) + + probabilities = calculate_probabilities(control, [test]) + significance, p_value = are_results_significant(control, [test], probabilities) + intervals = calculate_credible_intervals([control, test]) + + self.assertEqual(len(probabilities), 2) + self.assertTrue(abs(probabilities[0] - 0.5) < 0.1) # Should be close to 50/50 + self.assertTrue(abs(probabilities[1] - 0.5) < 0.1) # Should be close to 50/50 + self.assertEqual(significance, ExperimentSignificanceCode.LOW_WIN_PROBABILITY) + self.assertEqual(p_value, 1) + + # Both variants should have very small intervals near zero + self.assertAlmostEqual(intervals["control"][0], 0.0, places=3) + self.assertAlmostEqual(intervals["control"][1], 0.004, places=3) + + self.assertAlmostEqual(intervals["test"][0], 0.0, places=3) + self.assertAlmostEqual(intervals["test"][1], 0.004, places=3) + + self.run_test_for_both_implementations(run_test) + + def test_different_relative_and_absolute_exposure(self): + """Test that credible intervals are calculated using absolute_exposure rather than relative exposure""" + + def run_test(stats_version, calculate_probabilities, are_results_significant, calculate_credible_intervals): + # Control has exposure=1 (relative) but absolute_exposure=10000 + control = create_variant_with_different_exposures( + "control", count=1000, exposure=1, absolute_exposure=10000 + ) + # Test has exposure=1.2 (relative) but absolute_exposure=12000 + test = create_variant_with_different_exposures("test", count=1200, exposure=1.2, absolute_exposure=12000) + + probabilities = calculate_probabilities(control, [test]) + significance, p_value = are_results_significant(control, [test], probabilities) + intervals = calculate_credible_intervals([control, test]) + + self.assertEqual(len(probabilities), 2) + self.assertTrue(0.4 < probabilities[0] < 0.6) # Close to 50/50 + self.assertTrue(0.4 < probabilities[1] < 0.6) # Close to 50/50 + self.assertEqual(significance, ExperimentSignificanceCode.LOW_WIN_PROBABILITY) + self.assertEqual(p_value, 1) + + # Control at ~10% conversion rate + self.assertAlmostEqual(intervals["control"][0], 0.094, places=2) + 
self.assertAlmostEqual(intervals["control"][1], 0.106, places=2) + + # Test at ~10% conversion rate + self.assertAlmostEqual(intervals["test"][0], 0.094, places=2) + self.assertAlmostEqual(intervals["test"][1], 0.106, places=2) + + self.run_test_for_both_implementations(run_test) diff --git a/posthog/hogql_queries/experiments/trends_statistics_v2.py b/posthog/hogql_queries/experiments/trends_statistics_v2.py new file mode 100644 index 00000000000000..fa4bf4be312108 --- /dev/null +++ b/posthog/hogql_queries/experiments/trends_statistics_v2.py @@ -0,0 +1,204 @@ +from rest_framework.exceptions import ValidationError +from sentry_sdk import capture_exception +from posthog.hogql_queries.experiments import FF_DISTRIBUTION_THRESHOLD, MIN_PROBABILITY_FOR_SIGNIFICANCE +from posthog.hogql_queries.experiments.funnels_statistics import Probability +from posthog.schema import ExperimentSignificanceCode, ExperimentVariantTrendsBaseStats +from scipy.stats import gamma +import numpy as np + +# Prior parameters (minimal prior knowledge) +PRIOR_ALPHA = 1 +PRIOR_BETA = 1 + +SAMPLE_SIZE = 10000 + + +def calculate_probabilities_v2( + control_variant: ExperimentVariantTrendsBaseStats, test_variants: list[ExperimentVariantTrendsBaseStats] +) -> list[float]: + """ + Calculate the win probabilities for each variant in an experiment using Bayesian analysis. + + This function computes the probability that each variant is the best (i.e., has the highest + conversion rate) compared to all other variants, including the control. It uses samples + drawn from the posterior distributions of each variant's conversion rate. + + Parameters: + ----------- + control_variant : ExperimentVariantTrendsBaseStats + Statistics for the control group, including count (successes) and exposure (total trials) + test_variants : list[ExperimentVariantTrendsBaseStats] + List of statistics for test variants to compare against the control + + Returns: + -------- + list[float] + A list of probabilities where: + - The first element is the probability that the control variant is the best + - Subsequent elements are the probabilities that each test variant is the best + + Notes: + ------ + - Uses a Bayesian approach with a Gamma distribution as the posterior + - Assumes a minimally informative prior (alpha=1, beta=1) + - Draws samples from the posterior to estimate win probabilities + + Example: + -------- + >>> control = ExperimentVariantTrendsBaseStats(key="control", count=100, exposure=1000, absolute_exposure=1000) + >>> test = ExperimentVariantTrendsBaseStats(key="test", count=120, exposure=1000, absolute_exposure=1000) + >>> probabilities = calculate_probabilities_v2(control, [test]) + >>> # Returns: [0.085, 0.915] indicating the test variant is more likely to be the best + """ + if len(test_variants) >= 10: + raise ValidationError("Can't calculate experiment results for more than 10 variants", code="too_much_data") + if len(test_variants) < 1: + raise ValidationError("Can't calculate experiment results for less than 2 variants", code="no_data") + + # Calculate posterior parameters for control + alpha_control = PRIOR_ALPHA + control_variant.count + beta_control = PRIOR_BETA + control_variant.absolute_exposure + + # Draw samples from control posterior + samples_control = gamma.rvs(alpha_control, scale=1 / beta_control, size=SAMPLE_SIZE) + + # Draw samples for each test variant + test_samples = [] + for test in test_variants: + alpha_test = PRIOR_ALPHA + test.count + beta_test = PRIOR_BETA + test.absolute_exposure + 
test_samples.append(gamma.rvs(alpha_test, scale=1 / beta_test, size=SAMPLE_SIZE)) + + # Calculate probabilities + probabilities = [] + + # Probability control wins (beats all test variants) + control_wins = np.all([samples_control > test_sample for test_sample in test_samples], axis=0) + probabilities.append(float(np.mean(control_wins))) + + # Probability each test variant wins (beats control and all other test variants) + for i, test_sample in enumerate(test_samples): + other_test_samples = test_samples[:i] + test_samples[i + 1 :] + variant_wins = np.all( + [test_sample > samples_control] + [test_sample > other for other in other_test_samples], axis=0 + ) + probabilities.append(float(np.mean(variant_wins))) + + return probabilities + + +def are_results_significant_v2( + control_variant: ExperimentVariantTrendsBaseStats, + test_variants: list[ExperimentVariantTrendsBaseStats], + probabilities: list[Probability], +) -> tuple[ExperimentSignificanceCode, Probability]: + """ + Determines if experiment results are statistically significant using Bayesian analysis. + + This function evaluates the win probabilities of each variant to determine if any variant + is significantly better than the others. The method: + 1. Checks if sample sizes meet minimum threshold requirements + 2. Evaluates win probabilities from the posterior distributions + + Parameters: + ----------- + control_variant : ExperimentVariantTrendsBaseStats + Statistics for the control group, including count and exposure data + test_variants : list[ExperimentVariantTrendsBaseStats] + List of statistics for test variants to compare against control + probabilities : list[Probability] + List of win probabilities for each variant, as calculated by calculate_probabilities + + Returns: + -------- + tuple[ExperimentSignificanceCode, Probability] + - ExperimentSignificanceCode indicating the significance status: + NOT_ENOUGH_EXPOSURE: Insufficient sample size + LOW_WIN_PROBABILITY: No variant has a high enough probability of being best + SIGNIFICANT: Clear winner with high probability of being best + - Probability value (1.0 for NOT_ENOUGH_EXPOSURE and LOW_WIN_PROBABILITY, 0.0 for SIGNIFICANT) + + Notes: + ------ + - Uses a Bayesian approach to determine significance + - Does not use credible interval comparisons + - p_value is a placeholder (1.0 or 0.0) to indicate significance status + """ + # Check exposure thresholds + for variant in test_variants: + if variant.absolute_exposure < FF_DISTRIBUTION_THRESHOLD: + return ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE, 1.0 + + if control_variant.absolute_exposure < FF_DISTRIBUTION_THRESHOLD: + return ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE, 1.0 + + # Find highest probability among all variants + max_probability = max(probabilities) + + # Check if any variant has a high enough probability of being best + if max_probability < MIN_PROBABILITY_FOR_SIGNIFICANCE: + return ExperimentSignificanceCode.LOW_WIN_PROBABILITY, 1.0 + + return ExperimentSignificanceCode.SIGNIFICANT, 0.0 + + +def calculate_credible_intervals_v2(variants, lower_bound=0.025, upper_bound=0.975): + """ + Calculate Bayesian credible intervals for each variant's conversion rate. + + Credible intervals represent the range where we believe the true conversion rate lies + with a specified probability (default 95%). Unlike frequentist confidence intervals, + these have a direct probabilistic interpretation: "There is a 95% probability that + the true conversion rate lies within this interval." 
+ + Parameters: + ----------- + variants : list[ExperimentVariantTrendsBaseStats] + List of variants containing count (successes) and exposure (total trials) data + lower_bound : float, optional (default=0.025) + Lower percentile for the credible interval (2.5% for 95% CI) + upper_bound : float, optional (default=0.975) + Upper percentile for the credible interval (97.5% for 95% CI) + + Returns: + -------- + dict[str, tuple[float, float]] + Dictionary mapping variant keys to their credible intervals + Each interval is a tuple of (lower_bound, upper_bound) + + Notes: + ------ + - Uses a Gamma distribution as the posterior distribution + - Assumes a minimally informative prior (alpha=1, beta=1) + - Intervals are calculated for visualization purposes, not for significance testing + - Returns empty dict if any calculations fail + + Example: + -------- + >>> variants = [ + ... ExperimentVariantTrendsBaseStats(key="control", count=100, exposure=1000, absolute_exposure=1000), + ... ExperimentVariantTrendsBaseStats(key="test", count=150, exposure=1000, absolute_exposure=1000) + ... ] + >>> intervals = calculate_credible_intervals_v2(variants) + >>> # Returns: {"control": (0.082, 0.122), "test": (0.128, 0.176)} + """ + intervals = {} + + for variant in variants: + try: + # Calculate posterior parameters using absolute_exposure + alpha_posterior = PRIOR_ALPHA + variant.count + beta_posterior = PRIOR_BETA + variant.absolute_exposure + + # Calculate credible intervals using the posterior distribution + credible_interval = gamma.ppf([lower_bound, upper_bound], alpha_posterior, scale=1 / beta_posterior) + + intervals[variant.key] = (float(credible_interval[0]), float(credible_interval[1])) + except Exception as e: + capture_exception( + Exception(f"Error calculating credible interval for variant {variant.key}"), + {"error": str(e)}, + ) + return {} + + return intervals diff --git a/posthog/hogql_queries/insights/funnels/base.py b/posthog/hogql_queries/insights/funnels/base.py index 98d0cedea69b91..8d11a84473f48e 100644 --- a/posthog/hogql_queries/insights/funnels/base.py +++ b/posthog/hogql_queries/insights/funnels/base.py @@ -726,7 +726,9 @@ def _build_step_query( first_time_filter = parse_expr("e.uuid IN {subquery}", placeholders={"subquery": subquery}) return ast.And(exprs=[*filters, first_time_filter]) elif entity.math == FunnelMathType.FIRST_TIME_FOR_USER_WITH_FILTERS: - subquery = FirstTimeForUserAggregationQuery(self.context, ast.Constant(value=1), filter_expr).to_query() + subquery = FirstTimeForUserAggregationQuery( + self.context, ast.Constant(value=1), ast.And(exprs=filters) + ).to_query() first_time_filter = parse_expr("e.uuid IN {subquery}", placeholders={"subquery": subquery}) return ast.And(exprs=[*filters, first_time_filter]) elif len(filters) > 1: diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py index 40d15aae68a698..cc7b5a7928d13a 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py +++ b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py @@ -7,6 +7,7 @@ from posthog.hogql.parser import parse_select, parse_expr from posthog.hogql_queries.insights.funnels import FunnelTrends from posthog.hogql_queries.insights.funnels.base import JOIN_ALGOS +from posthog.hogql_queries.insights.funnels.funnel_udf import FunnelUDFMixin from posthog.hogql_queries.insights.utils.utils import get_start_of_interval_hogql_str from posthog.schema import BreakdownType, BreakdownAttributionType from 
posthog.utils import DATERANGE_MAP, relative_date_parse @@ -15,7 +16,7 @@ HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y" -class FunnelTrendsUDF(FunnelTrends): +class FunnelTrendsUDF(FunnelUDFMixin, FunnelTrends): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # In base, these fields only get added if you're running an actors query @@ -65,8 +66,6 @@ def _inner_aggregation_query(self): else: inner_event_query = self._get_inner_event_query_for_udf(entity_name="events") - default_breakdown_selector = "[]" if self._query_has_array_breakdown() else "''" - # stores the steps as an array of integers from 1 to max_steps # so if the event could be step_0, step_1 or step_4, it looks like [1,2,0,0,5] @@ -89,8 +88,7 @@ def _inner_aggregation_query(self): fn = "aggregate_funnel_trends" breakdown_prop = "" - prop_selector = "prop" if self.context.breakdown else default_breakdown_selector - prop_vals = "groupUniqArray(prop)" if self.context.breakdown else f"[{default_breakdown_selector}]" + prop_selector = "prop" if self.context.breakdown else self._default_breakdown_selector() breakdown_attribution_string = f"{self.context.breakdownAttributionType}{f'_{self.context.funnelsFilter.breakdownAttributionValue}' if self.context.breakdownAttributionType == BreakdownAttributionType.STEP else ''}" @@ -114,7 +112,7 @@ def _inner_aggregation_query(self): {self.conversion_window_limit()}, '{breakdown_attribution_string}', '{self.context.funnelsFilter.funnelOrderType}', - {prop_vals}, + {self._prop_vals()}, {self.udf_event_array_filter()} )) as af_tuple, toTimeZone(toDateTime(_toUInt64(af_tuple.1)), '{self.context.team.timezone}') as entrance_period_start, diff --git a/posthog/hogql_queries/insights/funnels/funnel_udf.py b/posthog/hogql_queries/insights/funnels/funnel_udf.py index 1bc07d685da02a..94bfcffb9ad42f 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_udf.py +++ b/posthog/hogql_queries/insights/funnels/funnel_udf.py @@ -1,17 +1,78 @@ -from typing import cast, Optional +from typing import cast, Optional, runtime_checkable from posthog.hogql import ast from posthog.hogql.constants import DEFAULT_RETURNED_ROWS, HogQLQuerySettings from posthog.hogql.parser import parse_select, parse_expr from posthog.hogql_queries.insights.funnels.base import FunnelBase, JOIN_ALGOS +from posthog.hogql_queries.insights.funnels.funnel_query_context import FunnelQueryContext from posthog.schema import BreakdownType, BreakdownAttributionType from posthog.utils import DATERANGE_MAP +from typing import Protocol + + +@runtime_checkable +class FunnelProtocol(Protocol): + context: FunnelQueryContext + + def _query_has_array_breakdown(self) -> bool: ... + + def _default_breakdown_selector(self) -> str: ... + + TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S" HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y" -class FunnelUDF(FunnelBase): +class FunnelUDFMixin: + def _add_breakdown_attribution_subquery(self: FunnelProtocol, inner_query: ast.SelectQuery) -> ast.SelectQuery: + breakdown, breakdownAttributionType = ( + self.context.breakdown, + self.context.breakdownAttributionType, + ) + + if breakdownAttributionType in [ + BreakdownAttributionType.FIRST_TOUCH, + BreakdownAttributionType.LAST_TOUCH, + ]: + # When breaking down by first/last touch, each person can only have one prop value + # so just select that. Except for the empty case, where we select the default. 
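+ # For array breakdowns each prop value is a list, so the empty default below is a list of empty strings, one per breakdown property.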
+ + if self._query_has_array_breakdown(): + assert isinstance(breakdown, list) + default_breakdown_value = f"""[{','.join(["''" for _ in range(len(breakdown or []))])}]""" + # default is [''] when dealing with a single breakdown array, otherwise ['', '', ...., ''] + breakdown_selector = parse_expr( + f"if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, {default_breakdown_value})" + ) + else: + breakdown_selector = ast.Field(chain=["prop_vals"]) + + return ast.SelectQuery( + select=[ast.Field(chain=["*"]), ast.Alias(alias="prop", expr=breakdown_selector)], + select_from=ast.JoinExpr(table=inner_query), + ) + + return inner_query + + def _prop_vals(self: FunnelProtocol): + prop_vals = f"[{self._default_breakdown_selector()}]" + if self.context.breakdown: + if self.context.breakdownAttributionType == BreakdownAttributionType.STEP: + prop = f"prop_{self.context.funnelsFilter.breakdownAttributionValue}" + else: + prop = "prop" + if self._query_has_array_breakdown(): + prop_vals = f"groupUniqArrayIf({prop}, {prop} != [])" + else: + prop_vals = f"groupUniqArray({prop})" + return prop_vals + + def _default_breakdown_selector(self: FunnelProtocol) -> str: + return "[]" if self._query_has_array_breakdown() else "''" + + +class FunnelUDF(FunnelUDFMixin, FunnelBase): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # In base, these fields only get added if you're running an actors query @@ -50,8 +111,6 @@ def _inner_aggregation_query(self): else: inner_event_query = self._get_inner_event_query_for_udf(entity_name="events") - default_breakdown_selector = "[]" if self._query_has_array_breakdown() else "''" - # stores the steps as an array of integers from 1 to max_steps # so if the event could be step_0, step_1 or step_4, it looks like [1,2,0,0,5] @@ -73,8 +132,9 @@ def _inner_aggregation_query(self): fn = "aggregate_funnel" breakdown_prop = "" - prop_selector = "prop" if self.context.breakdown else default_breakdown_selector - prop_vals = "groupUniqArray(prop)" if self.context.breakdown else f"[{default_breakdown_selector}]" + prop_selector = "prop" if self.context.breakdown else self._default_breakdown_selector() + + prop_vals = self._prop_vals() breakdown_attribution_string = f"{self.context.breakdownAttributionType}{f'_{self.context.funnelsFilter.breakdownAttributionValue}' if self.context.breakdownAttributionType == BreakdownAttributionType.STEP else ''}" diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr index f2478d4564da36..62ae8ccd05b019 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr @@ -27,7 +27,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), 
isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -120,7 +120,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -206,7 +206,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr index 9eba2f69086b82..d1b1edaa350d1b 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr @@ -22,7 +22,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -74,7 +74,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - 
arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -137,7 +137,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -202,7 +202,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -266,7 +266,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -361,7 +361,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 
1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -456,7 +456,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -551,7 +551,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -645,7 +645,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -710,7 +710,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], 
arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -774,7 +774,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -869,7 +869,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -964,7 +964,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1059,7 +1059,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1155,7 +1155,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1202,7 +1202,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1260,7 +1260,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1307,7 +1307,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1360,7 +1360,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1405,7 +1405,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1461,7 +1461,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1524,7 +1524,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1587,7 +1587,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1650,7 +1650,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1710,7 +1710,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1763,7 +1763,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1827,7 +1827,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1890,7 +1890,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1950,7 +1950,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1995,7 +1995,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2051,7 +2051,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2114,7 +2114,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2177,7 +2177,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2240,7 +2240,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2300,7 +2300,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2353,7 +2353,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2417,7 +2417,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2480,7 +2480,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2543,7 +2543,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2600,7 +2600,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2655,7 +2655,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2725,7 +2725,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2795,7 +2795,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2865,7 +2865,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2936,7 +2936,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -2993,7 +2993,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3049,7 +3049,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3106,7 +3106,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3161,7 +3161,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3231,7 +3231,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3301,7 +3301,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3371,7 +3371,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3442,7 +3442,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3499,7 +3499,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3555,7 +3555,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3612,7 +3612,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3667,7 +3667,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3737,7 +3737,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
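Every hunk in this stretch of the snapshot file makes the same mechanical substitution: aggregate_funnel_array_v3 becomes aggregate_funnel_array_v4 while all arguments stay the same. Since the version suffix is part of the function name, bumping it is what points every generated query at the newly deployed UDF. A minimal sketch of the call shape, with the source table hypothetical and assuming the v4 UDF is registered on the ClickHouse cluster:

    -- Sketch only: argument meanings inferred from the surrounding snapshots.
    SELECT
        arrayJoin(aggregate_funnel_array_v4(
            2,              -- number of funnel steps
            1209600,        -- conversion window in seconds (14 days)
            'first_touch',  -- breakdown attribution mode
            'ordered',      -- funnel order type ('strict' in the strict variants)
            [[]],           -- breakdown values; a single empty set when there is no breakdown
            events_array    -- sorted (timestamp, uuid, breakdown, steps) tuples
        )) AS af_tuple
    FROM funnel_events_sample  -- hypothetical table exposing events_array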
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3807,7 +3807,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3877,7 +3877,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -3948,7 +3948,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4005,7 +4005,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4061,7 +4061,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4118,7 +4118,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4173,7 +4173,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4243,7 +4243,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4313,7 +4313,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4383,7 +4383,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4454,7 +4454,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4511,7 +4511,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
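A recurring idiom in these queries is passing events_array together with arrayRotateRight(events_array, 1) and arrayRotateLeft(events_array, 1) into one arrayFilter: ClickHouse higher-order functions accept several arrays of equal length, so the lambda receives (current, previous, next) triples and can drop events that duplicate both neighbors. A standalone sketch with toy data (note that rotation wraps around at the array ends):

    -- Zip an array with its own rotations to compare each element
    -- against its predecessor and successor in a single pass.
    SELECT arrayFilter(
        (x, x_before, x_after) -> not(x = x_before and x = x_after),
        arr,
        arrayRotateRight(arr, 1),  -- previous element (last wraps to the front)
        arrayRotateLeft(arr, 1)    -- next element (first wraps to the back)
    ) AS deduped
    FROM (SELECT [1, 1, 1, 2] AS arr)  -- returns [1, 1, 2]: the middle 1 is dropped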
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4567,7 +4567,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4624,7 +4624,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4679,7 +4679,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4749,7 +4749,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4819,7 +4819,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4889,7 +4889,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -4960,7 +4960,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -5017,7 +5017,7 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> 
not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr index a03bed3fcfab6b..742e6b15740d6f 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr @@ -10,7 +10,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -85,7 +85,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -160,7 +160,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and 
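The steps array fed into each tuple is built as arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), ...]). Each step_k flag is 0 or 1, so multiplying by the 1-based step number yields that step's index when the event matches and 0 otherwise, and the filter then strips the zeros. A toy sketch of the encoding:

    -- An event matching steps 1 and 3 but not step 2 encodes as [1, 3].
    SELECT arrayFilter(
        x -> x != 0,
        [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)]
    ) AS matched_steps
    FROM (SELECT 1 AS step_0, 0 AS step_1, 1 AS step_2)  -- returns [1, 3]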
isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr index e1da2a48386bf0..dd6ed08f95fcca 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr @@ -1106,243 +1106,3 @@ allow_experimental_analyzer=1 ''' # --- -# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner, - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time, - prop - FROM - (SELECT *, - if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY - AND latest_1 <= latest_2 - AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, - if(isNotNull(latest_1) - AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, - if(isNotNull(latest_2) - AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - 
argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (1=1) ))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max(max_steps)) - WHERE steps IN [1, 2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- -# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner, - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time, - prop - FROM - (SELECT *, - if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY - AND latest_1 <= latest_2 - AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, - if(isNotNull(latest_1) - AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, - if(isNotNull(latest_2) - AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, - step_2, - min(latest_2) 
over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (1=1) ))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max(max_steps)) - WHERE steps IN [2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr index cb48bc92ba1a1e..2c3b80f8467e37 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr @@ -10,7 +10,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -85,7 +85,7 @@ actor_id AS id FROM (SELECT 
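The minus-only block above removes the stale TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5 through .8 snapshots, which recorded the legacy (pre-HogQL) breakdown queries; those queries are no longer generated, so the snapshots are deleted rather than rewritten. One idiom from the deleted SQL worth noting is how breakdown values outside the kept set were folded into an 'Other' bucket; a reduced sketch:

    -- Breakdown values not in the retained list collapse into 'Other'.
    SELECT if(has(['technology', 'finance'], prop), prop, 'Other') AS bucket
    FROM (SELECT 'media' AS prop)  -- returns 'Other'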
arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -160,7 +160,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr index 6c1d250f5fa058..9b7fba21179297 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr @@ -15,11 +15,12 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) - and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) - and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) - and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) - and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'strict', groupUniqArrayIf(prop, ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and 
isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -89,55 +90,39 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'step_1', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) - and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) - and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) - and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) - and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'step_1', 'strict', groupUniqArrayIf(prop_1, ifNull(notEquals(prop_1, []), isNotNull(prop_1) + or isNotNull([]))), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, aggregation_target AS aggregation_target FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - uuid AS uuid, - `$session_id` AS `$session_id`, - `$window_id` AS `$window_id`, - step_0 AS step_0, - step_1 AS step_1, - prop_basic AS prop_basic, - prop_0 AS prop_0, - prop_1 AS prop_1, - prop, - prop_vals AS prop_vals, - prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, - e.uuid AS uuid, - e.`$session_id` AS `$session_id`, - e.`$window_id` AS `$window_id`, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, - [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, - if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, - if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, - prop_1 AS prop, - groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, - person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE 
equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY - JOIN prop_vals AS prop - WHERE ifNull(notEquals(prop, []), isNotNull(prop) - or isNotNull([]))) + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, + e.`$session_id` AS `$session_id`, + e.`$window_id` AS `$window_id`, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) GROUP BY breakdown @@ -170,11 +155,12 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) - and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) - and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) - and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) - and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'strict', groupUniqArrayIf(prop, ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))), arrayFilter((x, 
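Besides the version bump, the hunks in test_funnel_strict_udf.ambr change how breakdown values are collected: groupUniqArray(prop) over a subquery that ARRAY JOINed prop_vals and filtered empty arrays in a WHERE clause becomes groupUniqArrayIf(prop, ...) with the emptiness check folded into the aggregate's -If combinator, which is what lets the wrapping subquery disappear from the new snapshots. A reduced sketch of the two forms, assuming a table t with an Array column prop:

    -- Before: pre-filter rows in a subquery or WHERE clause, then aggregate.
    SELECT groupUniqArray(prop) FROM t WHERE prop != [];

    -- After: aggregate conditionally; rows with an empty prop are simply
    -- not fed into the aggregate, so no wrapping subquery is needed.
    SELECT groupUniqArrayIf(prop, prop != []) FROM t;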
x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -249,7 +235,7 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -331,7 +317,7 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -413,7 +399,7 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -490,7 +476,7 @@ actor_id AS id 
FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -570,7 +556,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -650,7 +636,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -730,7 +716,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), 
isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -801,243 +787,3 @@ allow_experimental_analyzer=1 ''' # --- -# name: TestStrictFunnelGroupBreakdownUDF.test_funnel_breakdown_group.5 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestStrictFunnelGroupBreakdownUDF.test_funnel_breakdown_group.6 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner, - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time, - prop - FROM - (SELECT *, - if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY - AND latest_1 <= latest_2 - AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, - if(isNotNull(latest_1) - AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, - if(isNotNull(latest_2) - AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 
00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (1=1) ))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max(max_steps)) - WHERE steps IN [1, 2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- -# name: TestStrictFunnelGroupBreakdownUDF.test_funnel_breakdown_group.7 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestStrictFunnelGroupBreakdownUDF.test_funnel_breakdown_group.8 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner, - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time, - prop - FROM - (SELECT *, - if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY - AND latest_1 <= latest_2 - AND latest_2 <= latest_0 + INTERVAL 7 DAY, 3, if(latest_0 <= latest_1 - AND latest_1 <= latest_0 + INTERVAL 7 DAY, 2, 1)) AS steps, - if(isNotNull(latest_1) - AND latest_1 <= latest_0 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, - if(isNotNull(latest_2) - AND latest_2 <= latest_1 + INTERVAL 7 DAY, dateDiff('second', toDateTime(latest_1), toDateTime(latest_2)), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN 2 PRECEDING AND 2 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - 
if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (1=1) ))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max(max_steps)) - WHERE steps IN [2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr index cf92dbe9bc035a..d078ffbc040d7c 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr @@ -10,7 +10,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_trends_v3(0, 2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + arrayJoin(aggregate_funnel_array_trends_v4(0, 2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) @@ -83,7 +83,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - 
arrayJoin(aggregate_funnel_array_trends_v3(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + arrayJoin(aggregate_funnel_array_trends_v4(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) @@ -156,7 +156,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_trends_v3(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + arrayJoin(aggregate_funnel_array_trends_v4(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr index 91bb45c0d1068a..4a77813bdecd21 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr @@ -8,7 +8,7 @@ data.breakdown AS prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_trends_v3(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + arrayJoin(aggregate_funnel_array_trends_v4(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) @@ -61,7 +61,7 @@ data.breakdown AS prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'US/Pacific')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_trends_v3(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), 
isNull(x.5) + arrayJoin(aggregate_funnel_array_trends_v4(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) @@ -114,7 +114,7 @@ data.breakdown AS prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfWeek(timestamp, 0), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_trends_v3(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + arrayJoin(aggregate_funnel_array_trends_v4(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr index d5e966b306624f..14b3e38b2eb645 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr @@ -20,7 +20,7 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 15, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 15, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -74,7 +74,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 15, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 15, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), 
ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -150,7 +150,7 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -230,7 +230,7 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -297,7 +297,7 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -362,7 +362,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, 
x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -429,7 +429,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -496,7 +496,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -578,7 +578,7 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -640,7 +640,7 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), 
ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -699,11 +699,12 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) - and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) - and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) - and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) - and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', groupUniqArrayIf(prop, ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -773,55 +774,39 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'step_1', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) - and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) - and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) - and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) - and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'step_1', 'ordered', groupUniqArrayIf(prop_1, ifNull(notEquals(prop_1, []), isNotNull(prop_1) + or isNotNull([]))), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS 
breakdown, af_tuple.3 AS timings, aggregation_target AS aggregation_target FROM - (SELECT timestamp AS timestamp, - aggregation_target AS aggregation_target, - uuid AS uuid, - `$session_id` AS `$session_id`, - `$window_id` AS `$window_id`, - step_0 AS step_0, - step_1 AS step_1, - prop_basic AS prop_basic, - prop_0 AS prop_0, - prop_1 AS prop_1, - prop, - prop_vals AS prop_vals, - prop - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, - e.uuid AS uuid, - e.`$session_id` AS `$session_id`, - e.`$window_id` AS `$window_id`, - if(equals(e.event, 'sign up'), 1, 0) AS step_0, - if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, - [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, - if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, - if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, - prop_1 AS prop, - groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals - FROM events AS e - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, - person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY - JOIN prop_vals AS prop - WHERE ifNull(notEquals(prop, []), isNotNull(prop) - or isNotNull([]))) + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, + e.`$session_id` AS `$session_id`, + e.`$window_id` AS `$window_id`, + if(equals(e.event, 'sign up'), 1, 0) AS step_0, + if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1, + [ifNull(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', '')), '')] AS prop_basic, + if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0, + if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1, + prop_1 AS prop, + groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 
0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) GROUP BY breakdown @@ -854,11 +839,12 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v3(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) - and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) - and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) - and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) - and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v4(2, 1209600, 'first_touch', 'ordered', groupUniqArrayIf(prop, ifNull(notEquals(prop, []), isNotNull(prop) + or isNotNull([]))), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -933,7 +919,7 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1015,7 +1001,7 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), 
multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1097,7 +1083,7 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1174,7 +1160,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1254,7 +1240,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1334,7 +1320,7 @@ actor_id AS id FROM 
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) @@ -1414,7 +1400,7 @@ actor_id AS id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v3(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + arrayJoin(aggregate_funnel_v4(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr index 42d681ad3f68bd..d33ab0746b287a 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr @@ -1478,583 +1478,6 @@ allow_experimental_analyzer=1 ''' # --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.10 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.11 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND 
toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.12 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner, - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time, - prop - FROM - (SELECT *, - arraySort([latest_0,latest_1,latest_2]) as event_times, - arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps , - arraySort([latest_0,latest_1,latest_2]) as conversion_times, - if(isNotNull(conversion_times[2]) - AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time, - if(isNotNull(conversion_times[3]) - AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN 
['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))) - WHERE step_0 = 1 - UNION ALL SELECT *, - arraySort([latest_0,latest_1,latest_2]) as event_times, - arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps , - arraySort([latest_0,latest_1,latest_2]) as conversion_times, - if(isNotNull(conversion_times[2]) - AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time, - if(isNotNull(conversion_times[3]) - AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'play movie', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'buy', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'sign up', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))) - WHERE step_0 = 1 - UNION ALL SELECT *, - arraySort([latest_0,latest_1,latest_2]) as event_times, - arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps , - arraySort([latest_0,latest_1,latest_2]) as conversion_times, - 
if(isNotNull(conversion_times[2]) - AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time, - if(isNotNull(conversion_times[3]) - AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'buy', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'sign up', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'play movie', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max(max_steps)) - WHERE steps IN [1, 2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.13 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND 
toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.14 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.15 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.16 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner, - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time, - prop - FROM - (SELECT *, - arraySort([latest_0,latest_1,latest_2]) as event_times, - arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps , - arraySort([latest_0,latest_1,latest_2]) as conversion_times, - if(isNotNull(conversion_times[2]) - AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time, - if(isNotNull(conversion_times[3]) - AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - 
if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))) - WHERE step_0 = 1 - UNION ALL SELECT *, - arraySort([latest_0,latest_1,latest_2]) as event_times, - arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps , - arraySort([latest_0,latest_1,latest_2]) as conversion_times, - if(isNotNull(conversion_times[2]) - AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time, - if(isNotNull(conversion_times[3]) - AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'play movie', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'buy', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'sign up', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by 
aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))) - WHERE step_0 = 1 - UNION ALL SELECT *, - arraySort([latest_0,latest_1,latest_2]) as event_times, - arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps , - arraySort([latest_0,latest_1,latest_2]) as conversion_times, - if(isNotNull(conversion_times[2]) - AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time, - if(isNotNull(conversion_times[3]) - AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'buy', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'sign up', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'play movie', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT 
group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max(max_steps)) - WHERE steps IN [2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('technology')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.2 ''' SELECT source.id, @@ -2832,326 +2255,3 @@ allow_experimental_analyzer=1 ''' # --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.5 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.6 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.7 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.8 - ''' - - SELECT aggregation_target AS actor_id - FROM - (SELECT aggregation_target, - steps, - avg(step_1_conversion_time) step_1_average_conversion_time_inner, - avg(step_2_conversion_time) step_2_average_conversion_time_inner, - 
median(step_1_conversion_time) step_1_median_conversion_time_inner, - median(step_2_conversion_time) step_2_median_conversion_time_inner, - prop - FROM - (SELECT aggregation_target, - steps, - max(steps) over (PARTITION BY aggregation_target, - prop) as max_steps, - step_1_conversion_time, - step_2_conversion_time, - prop - FROM - (SELECT *, - arraySort([latest_0,latest_1,latest_2]) as event_times, - arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps , - arraySort([latest_0,latest_1,latest_2]) as conversion_times, - if(isNotNull(conversion_times[2]) - AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time, - if(isNotNull(conversion_times[3]) - AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'sign up', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'play movie', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'buy', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))) - WHERE step_0 = 1 - UNION ALL SELECT *, - arraySort([latest_0,latest_1,latest_2]) as event_times, - arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps , - 
arraySort([latest_0,latest_1,latest_2]) as conversion_times, - if(isNotNull(conversion_times[2]) - AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time, - if(isNotNull(conversion_times[3]) - AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'play movie', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'buy', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'sign up', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))) - WHERE step_0 = 1 - UNION ALL SELECT *, - arraySort([latest_0,latest_1,latest_2]) as event_times, - arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps , - arraySort([latest_0,latest_1,latest_2]) as conversion_times, - if(isNotNull(conversion_times[2]) - AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time, - if(isNotNull(conversion_times[3]) - AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time - FROM - (SELECT aggregation_target, timestamp, step_0, - latest_0, - step_1, - min(latest_1) over (PARTITION by aggregation_target, - prop - ORDER BY 
timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1, - step_2, - min(latest_2) over (PARTITION by aggregation_target, - prop - ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 , - if(has(['technology', 'finance'], prop), prop, 'Other') as prop - FROM - (SELECT *, - prop_vals as prop - FROM - (SELECT e.timestamp as timestamp, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, - if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, - if(event = 'buy', 1, 0) as step_0, - if(step_0 = 1, timestamp, null) as latest_0, - if(event = 'sign up', 1, 0) as step_1, - if(step_1 = 1, timestamp, null) as latest_1, - if(event = 'play movie', 1, 0) as step_2, - if(step_2 = 1, timestamp, null) as latest_2, - replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic, - prop_basic as prop, - argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals - FROM events e - LEFT OUTER JOIN - (SELECT distinct_id, - argMax(person_id, version) as person_id - FROM person_distinct_id2 - WHERE team_id = 99999 - AND distinct_id IN - (SELECT distinct_id - FROM events - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') ) - GROUP BY distinct_id - HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - AND (step_0 = 1 - OR step_1 = 1 - OR step_2 = 1) ))) - WHERE step_0 = 1 )) - GROUP BY aggregation_target, - steps, - prop - HAVING steps = max(max_steps)) - WHERE steps IN [2, 3] - AND arrayFlatten(array(prop)) = arrayFlatten(array('finance')) - ORDER BY aggregation_target - LIMIT 100 - OFFSET 0 SETTINGS max_ast_elements=1000000, - max_expanded_ast_elements=1000000 - ''' -# --- -# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.9 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value, - count(*) as count - FROM events e - LEFT JOIN - (SELECT group_key, - argMax(group_properties, _timestamp) AS group_properties_0 - FROM groups - WHERE team_id = 99999 - AND group_type_index = 0 - GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key - WHERE team_id = 99999 - AND event IN ['buy', 'play movie', 'sign up'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel.py b/posthog/hogql_queries/insights/funnels/test/test_funnel.py index b92891822cb873..2c762f279331b4 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel.py @@ -4429,6 +4429,20 @@ def test_first_time_for_user_funnel_person_properties(self): # 
classic and udf funnels handle no events differently assert len(results) == 0 or results[0]["count"] == 0 + _create_event( + team=self.team, + event="event2", + distinct_id="user_1", + timestamp="2024-03-19T13:00:00Z", + properties={"property": "woah"}, + ) + query.series[0].math = FunnelMathType.FIRST_TIME_FOR_USER_WITH_FILTERS + assert query.dateRange is not None + query.dateRange.date_from = "2024-03-19" + results = FunnelsQueryRunner(query=query, team=self.team).calculate().results + self.assertEqual(results[0]["count"], 1) + self.assertEqual(results[1]["count"], 1) + def test_funnel_personless_events_are_supported(self): user_id = uuid.uuid4() _create_event( @@ -4576,6 +4590,95 @@ def test_excluded_completion(self): self.assertEqual(0, results[0]["count"]) self.assertEqual(0, results[1]["count"]) + def test_breakdown_step_attributions(self): + events = [ + { + "event": "step one", + "properties": {"$browser": "Chrome"}, + "timestamp": datetime(2021, 5, 1, 0, 0, 0), + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 1, 0, 0, 1), + "properties": {"$browser": "Safari"}, + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 1, 0, 0, 2), + "properties": {"$browser": "Chrome"}, + }, + { + "event": "step three", + "timestamp": datetime(2021, 5, 1, 0, 0, 3), + "properties": {"$browser": "Chrome"}, + }, + ] + + journeys_for( + { + "user_one": events, + }, + self.team, + ) + + filters = { + "insight": INSIGHT_FUNNELS, + "funnel_viz_type": "steps", + "date_from": "2021-05-01 00:00:00", + "date_to": "2021-05-02 23:59:59", + "funnel_window_interval": 30, + "funnel_window_interval_unit": "second", + "events": [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ], + "breakdown_type": "event", + "breakdown": "$browser", + } + + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + assert 1 == len(results) + result = results[0] + assert 3 == len(result) + assert all(x["count"] == 1 for x in result) + assert all(x["breakdown"] == ["Chrome"] for x in result) + + filters["breakdown_attribution_type"] = "all_events" + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + assert 1 == len(results) + result = results[0] + assert [x["count"] for x in result] == [1, 1, 1] + assert all(x["breakdown"] == ["Chrome"] for x in result) + + filters["breakdown_attribution_type"] = "step" + filters["breakdown_attribution_value"] = 0 + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + assert 1 == len(results) + result = results[0] + assert [x["count"] for x in result] == [1, 1, 1] + assert all(x["breakdown"] == ["Chrome"] for x in result) + + filters["breakdown_attribution_type"] = "step" + filters["breakdown_attribution_value"] = 1 + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + assert 2 == len(results) + for result in results: + assert [x["count"] for x in result] == [1, 1, 1] + + filters["breakdown_attribution_type"] = "step" + filters["breakdown_attribution_value"] = 2 + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + assert 1 == len(results)
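+ # `all(...)` is required for the element-wise asserts in this test: a bare + # `assert [x["count"] == 1 for x in result]` would always pass whenever + # `result` is non-empty, because any non-empty list is truthy. For example: + # assert [False] # would pass: the list itself is truthy + # assert all([False]) # fails, which is the intended check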
+ result = results[0] + assert [x["count"] for x in result] == [1, 1, 1] + assert all(x["breakdown"] == ["Chrome"] for x in result) + return TestGetFunnel diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py index 5dc01e90fa1a45..2bf698a26ffc5c 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py @@ -2342,6 +2342,205 @@ def test_excluded_completion(self): self.assertEqual(0, results[0]["reached_from_step_count"]) self.assertEqual(0, results[0]["reached_to_step_count"]) + def test_breakdown_with_attribution(self): + events = [ + { + "event": "step one", + "properties": {"$browser": "Chrome"}, + "timestamp": datetime(2021, 5, 1, 0, 0, 0), + }, + { + "event": "step one", + "properties": {"$browser": "Safari"}, + "timestamp": datetime(2021, 5, 1, 0, 0, 1), + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 1, 0, 0, 14), + "properties": {"$browser": "Chrome"}, + }, + { + "event": "step one", + "properties": {"$browser": "Chrome"}, + "timestamp": datetime(2021, 5, 2, 0, 0, 0), + }, + { + "event": "step two", + "properties": {"$browser": "Safari"}, + "timestamp": datetime(2021, 5, 2, 0, 0, 1), + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 2, 0, 0, 14), + "properties": {"$browser": "Chrome"}, + }, + ] + journeys_for( + { + "user_one": events, + }, + self.team, + ) + + filters = { + "insight": INSIGHT_FUNNELS, + "funnel_viz_type": "trends", + "interval": "day", + "date_from": "2021-05-01 00:00:00", + "date_to": "2021-05-02 23:59:59", + "funnel_window_interval": 30, + "funnel_window_interval_unit": "second", + "events": [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + ], + "breakdown_type": "event", + "breakdown": "$browser", + } + + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + + assert 2 == len(results) + assert [1, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 1] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + + filters["breakdown_attribution_type"] = "all_events" + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + + assert 4 == len(results) + assert [1, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 1] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 0] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + assert [0, 0] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + + filters["breakdown_attribution_type"] = "step" + filters["breakdown_attribution_value"] = 0 + query = cast(FunnelsQuery, filter_to_query(filters)) + full_results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate() + results = full_results.results + + # Normal (non-UDF) funnels produce incorrect results here, so only assert for the UDF implementation + if full_results.isUdf: + assert 4 == len(results) + assert [1, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 1] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 0] == 
[x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + assert [1, 0] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + + filters["breakdown_attribution_type"] = "step" + filters["breakdown_attribution_value"] = 1 + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + + assert 4 == len(results) + assert [1, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 1] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + assert [0, 1] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + + def test_breakdown_with_attribution_2(self): + events = [ + { + "event": "step one", + "properties": {"$browser": "Chrome"}, + "timestamp": datetime(2021, 5, 1, 0, 0, 0), + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 1, 0, 0, 1), + "properties": {"$browser": "Chrome"}, + }, + { + "event": "step three", + "timestamp": datetime(2021, 5, 1, 0, 0, 2), + "properties": {"$browser": "Safari"}, + }, + { + "event": "step one", + "properties": {"$browser": "Safari"}, + "timestamp": datetime(2021, 5, 2, 0, 0, 0), + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 2, 0, 0, 1), + "properties": {"$browser": "Safari"}, + }, + { + "event": "step three", + "timestamp": datetime(2021, 5, 2, 0, 0, 2), + "properties": {"$browser": "Chrome"}, + }, + ] + + journeys_for( + { + "user_one": events, + }, + self.team, + ) + + filters = { + "insight": INSIGHT_FUNNELS, + "funnel_viz_type": "trends", + "interval": "day", + "date_from": "2021-05-01 00:00:00", + "date_to": "2021-05-02 23:59:59", + "funnel_window_interval": 30, + "funnel_window_interval_unit": "second", + "events": [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ], + "breakdown_type": "event", + "breakdown": "$browser", + "funnel_from_step": 0, + "funnel_to_step": 2, + } + + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + + assert 2 == len(results) + assert [1, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 1] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + + filters["breakdown_attribution_type"] = "all_events" + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + + assert 4 == len(results) + assert [1, 0] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [0, 0] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [0, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + assert [0, 0] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + + filters["breakdown_attribution_type"] = "step" + filters["breakdown_attribution_value"] = 0 + query = cast(FunnelsQuery, filter_to_query(filters)) + full_results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate() + results = full_results.results + + if 
full_results.isUdf: + assert 4 == len(results) + assert [1, 0] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 0] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [0, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + assert [0, 1] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + + filters["breakdown_attribution_type"] = "step" + filters["breakdown_attribution_value"] = 2 + query = cast(FunnelsQuery, filter_to_query(filters)) + results = FunnelsQueryRunner(query=query, team=self.team, just_summarize=True).calculate().results + + assert 4 == len(results) + assert [1, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [0, 1] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Chrome"]] + assert [1, 1] == [x["reached_from_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + assert [1, 0] == [x["reached_to_step_count"] for x in results if x["breakdown_value"] == ["Safari"]] + class TestFunnelTrends(BaseTestFunnelTrends): __test__ = True diff --git a/posthog/hogql_queries/insights/trends/aggregation_operations.py b/posthog/hogql_queries/insights/trends/aggregation_operations.py index 84e7c181f7c13c..9e88bfca8b4aa3 100644 --- a/posthog/hogql_queries/insights/trends/aggregation_operations.py +++ b/posthog/hogql_queries/insights/trends/aggregation_operations.py @@ -92,6 +92,7 @@ def requires_query_orchestration(self) -> bool: "weekly_active", "monthly_active", "first_time_for_user", + "first_matching_event_for_user", ] return self.is_count_per_actor_variant() or self.series.math in math_to_return_true @@ -116,6 +117,9 @@ def is_active_users_math(self): def is_first_time_ever_math(self): return self.series.math == "first_time_for_user" + def is_first_matching_event(self): + return self.series.math == "first_matching_event_for_user" + def _math_func(self, method: str, override_chain: Optional[list[str | int]]) -> ast.Call: if override_chain is not None: return ast.Call(name=method, args=[ast.Field(chain=override_chain)]) @@ -452,7 +456,11 @@ def _first_time_parent_query(self, inner_query: ast.SelectQuery): return query def get_first_time_math_query_orchestrator( - self, events_where_clause: ast.Expr, sample_value: ast.RatioExpr, event_name_filter: ast.Expr | None = None + self, + events_where_clause: ast.Expr, + sample_value: ast.RatioExpr, + event_name_filter: ast.Expr | None = None, + is_first_matching_event: bool = False, ): date_placeholders = self.query_date_range.to_placeholders() date_from = parse_expr( @@ -479,6 +487,7 @@ def __init__(self): filters=events_where_clause, event_or_action_filter=event_name_filter, ratio=sample_value, + is_first_matching_event=is_first_matching_event, ) self.parent_query_builder = QueryAlternator(parent_select) diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr index 9bd3a90b8d5593..729923069bad68 100644 --- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr +++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr @@ -847,26 +847,96 @@ # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) 
AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + breakdown_value AS breakdown_value + FROM + (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value + FROM events AS e SAMPLE 1.0 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start, + breakdown_value) + GROUP BY day_start, + breakdown_value + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE isNotNull(breakdown_value) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: 
TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + breakdown_value AS breakdown_value + FROM + (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value + FROM events AS e SAMPLE 1.0 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start, + breakdown_value) + GROUP BY day_start, + breakdown_value + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE isNotNull(breakdown_value) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + 
allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.10 @@ -1083,38 +1153,143 @@ # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + [ifNull(toString(breakdown_value_1), '$$_posthog_breakdown_null_$$')] AS breakdown_value + FROM + (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 + FROM events AS e SAMPLE 1.0 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start, + breakdown_value_1) + GROUP BY day_start, + breakdown_value_1 + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value 
ASC) + WHERE arrayExists(x -> isNotNull(x), breakdown_value) + GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + [ifNull(toString(breakdown_value_1), '$$_posthog_breakdown_null_$$')] AS breakdown_value + FROM + (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 + FROM events AS e SAMPLE 1.0 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start, + breakdown_value_1) + GROUP BY day_start, + breakdown_value_1 + ORDER BY day_start 
ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE arrayExists(x -> isNotNull(x), breakdown_value) + GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.4 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + [ifNull(toString(breakdown_value_1), '$$_posthog_breakdown_null_$$')] AS breakdown_value + FROM + (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 + FROM events AS e SAMPLE 1.0 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 
lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start, + breakdown_value_1) + GROUP BY day_start, + breakdown_value_1 + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE arrayExists(x -> isNotNull(x), breakdown_value) + GROUP BY breakdown_value + ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.5 @@ -1791,18 +1966,6 @@ # --- # name: TestTrends.test_person_filtering_in_cohort_in_action ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: TestTrends.test_person_filtering_in_cohort_in_action.1 - ''' SELECT count(DISTINCT person_id) FROM cohortpeople @@ -1811,7 +1974,7 @@ AND version = NULL ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action.2 +# name: TestTrends.test_person_filtering_in_cohort_in_action.1 ''' /* cohort_calculation: */ SELECT count(DISTINCT person_id) @@ -1821,7 +1984,7 @@ AND version = 0 ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action.3 +# name: TestTrends.test_person_filtering_in_cohort_in_action.2 ''' SELECT groupArray(1)(date)[1] AS date, arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, @@ -1871,19 +2034,57 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2 +# name: TestTrends.test_person_filtering_in_cohort_in_action.3 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, 
_match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + breakdown_value AS breakdown_value + FROM + (SELECT count() AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value + FROM events AS e SAMPLE 1 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), and(equals(e.event, 'sign up'), ifNull(in(if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id), + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))), 0))) + GROUP BY day_start, + breakdown_value) + GROUP BY day_start, + breakdown_value + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE isNotNull(breakdown_value) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.1 +# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2 ''' SELECT count(DISTINCT person_id) @@ -1893,7 +2094,7 @@ AND version = NULL ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.2 +# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.1 ''' /* cohort_calculation: */ SELECT count(DISTINCT person_id) @@ -1903,6 +2104,56 @@ AND version = 0 ''' # --- +# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.2 + ''' + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + 
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + breakdown_value AS breakdown_value + FROM + (SELECT count() AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value + FROM events AS e SAMPLE 1 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), and(equals(e.event, 'sign up'), ifNull(in(if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id), + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))), 0))) + GROUP BY day_start, + breakdown_value) + GROUP BY day_start, + breakdown_value + ORDER BY day_start ASC, breakdown_value ASC) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE isNotNull(breakdown_value) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- # name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.3 ''' SELECT groupArray(1)(date)[1] AS date, @@ -3659,14 +3910,28 @@ # --- # name: 
TestTrends.test_trends_any_event_total_count ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total + FROM + (SELECT sum(total) AS count, + day_start AS day_start + FROM + (SELECT count() AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start + FROM events AS e SAMPLE 1 + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))) + GROUP BY day_start) + GROUP BY day_start + ORDER BY day_start ASC) + ORDER BY arraySum(total) DESC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_trends_any_event_total_count.1 @@ -3681,7 +3946,7 @@ (SELECT count() AS total, toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start FROM events AS e SAMPLE 1 - WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start) GROUP BY day_start ORDER BY day_start ASC) @@ -3723,14 +3988,55 @@ # --- # name: TestTrends.test_trends_breakdown_cumulative ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 
'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT day_start AS day_start, + sum(count) OVER (PARTITION BY breakdown_value + ORDER BY day_start ASC) AS count, + breakdown_value AS breakdown_value + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + breakdown_value AS breakdown_value + FROM + (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, + min(toStartOfDay(toTimeZone(e.timestamp, 'UTC'))) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value + FROM events AS e SAMPLE 1 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id), + breakdown_value) + GROUP BY day_start, + breakdown_value + ORDER BY day_start ASC, breakdown_value ASC) + ORDER BY day_start ASC) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE isNotNull(breakdown_value) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_trends_breakdown_cumulative.1 @@ -3788,14 +4094,55 @@ # --- # name: TestTrends.test_trends_breakdown_cumulative_poe_v2 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > 
date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT groupArray(1)(date)[1] AS date, + arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, + if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value + FROM + (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total, + breakdown_value AS breakdown_value, + rowNumberInAllBlocks() AS row_number + FROM + (SELECT day_start AS day_start, + sum(count) OVER (PARTITION BY breakdown_value + ORDER BY day_start ASC) AS count, + breakdown_value AS breakdown_value + FROM + (SELECT sum(total) AS count, + day_start AS day_start, + breakdown_value AS breakdown_value + FROM + (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, + min(toStartOfDay(toTimeZone(e.timestamp, 'UTC'))) AS day_start, + ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value + FROM events AS e SAMPLE 1 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id), + breakdown_value) + GROUP BY day_start, + breakdown_value + ORDER BY day_start ASC, breakdown_value ASC) + ORDER BY day_start ASC) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) + WHERE isNotNull(breakdown_value) + GROUP BY breakdown_value + ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value 
ASC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_trends_breakdown_cumulative_poe_v2.1 @@ -4092,19 +4439,33 @@ # --- # name: TestTrends.test_trends_compare_day_interval_relative_range ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total + FROM + (SELECT sum(total) AS count, + day_start AS day_start + FROM + (SELECT count() AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start + FROM events AS e SAMPLE 1 + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start) + GROUP BY day_start + ORDER BY day_start ASC) + ORDER BY arraySum(total) DESC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_trends_compare_day_interval_relative_range.1 ''' - SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 23:59:59', 6, 'UTC'))))), 1))) AS date, arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total FROM @@ -4114,7 +4475,7 @@ (SELECT count() AS total, toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start FROM events AS e SAMPLE 1 - WHERE and(equals(e.team_id, 
99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start) GROUP BY day_start ORDER BY day_start ASC) @@ -4130,7 +4491,7 @@ # --- # name: TestTrends.test_trends_compare_day_interval_relative_range.2 ''' - SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 23:59:59', 6, 'UTC'))))), 1))) AS date, + SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total FROM @@ -4140,7 +4501,7 @@ (SELECT count() AS total, toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start FROM events AS e SAMPLE 1 - WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start) GROUP BY day_start ORDER BY day_start ASC) @@ -4422,14 +4783,33 @@ # --- # name: TestTrends.test_trends_per_day_cumulative ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayFill(x -> ifNull(greater(x, 0), 0), 
arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total + FROM + (SELECT day_start AS day_start, + sum(count) OVER ( + ORDER BY day_start ASC) AS count + FROM + (SELECT sum(total) AS count, + day_start AS day_start + FROM + (SELECT count() AS total, + toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start + FROM events AS e SAMPLE 1 + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY day_start) + GROUP BY day_start + ORDER BY day_start ASC) + ORDER BY day_start ASC) + ORDER BY arraySum(total) DESC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_trends_per_day_cumulative.1 @@ -4465,14 +4845,40 @@ # --- # name: TestTrends.test_trends_per_day_dau_cumulative ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, + arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) + and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total + FROM + (SELECT day_start AS day_start, + sum(count) OVER ( + ORDER BY day_start ASC) AS count + FROM + (SELECT sum(total) AS count, + day_start AS day_start + FROM + (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, + min(toStartOfDay(toTimeZone(e.timestamp, 'UTC'))) AS day_start + FROM events AS e SAMPLE 1 + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 
lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + GROUP BY if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) + GROUP BY day_start + ORDER BY day_start ASC) + ORDER BY day_start ASC) + ORDER BY arraySum(total) DESC + LIMIT 50000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 ''' # --- # name: TestTrends.test_trends_per_day_dau_cumulative.1 diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py index e72c4b66b4d792..c3c7d04a71af64 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py @@ -5013,3 +5013,57 @@ def test_trends_aggregation_total_with_null(self): assert len(response.results) == 1 assert response.results[0]["data"] == [1.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.1] + + def test_trends_aggregation_first_matching_event_for_user(self): + _create_person( + team=self.team, + distinct_ids=["p1"], + properties={}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-08T12:00:00Z", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-09T12:00:00Z", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-10T12:00:00Z", + properties={"$browser": "Firefox"}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-11T12:00:00Z", + properties={"$browser": "Firefox"}, + ) + flush_persons_and_events() + + response = self._run_trends_query( + "2020-01-08", + "2020-01-11", + IntervalType.DAY, + [ + EventsNode( + event="$pageview", + math=BaseMathType.FIRST_MATCHING_EVENT_FOR_USER, + properties=[EventPropertyFilter(key="$browser", operator=PropertyOperator.EXACT, value="Firefox")], + ) + ], + TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH), + ) + + assert len(response.results) == 1 + assert response.results[0]["count"] == 1 + assert response.results[0]["data"] == [0, 0, 1, 0] diff --git a/posthog/hogql_queries/insights/trends/trends_query_builder.py b/posthog/hogql_queries/insights/trends/trends_query_builder.py index 826d52c1e556f7..a0e1b185ce8069 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_builder.py +++ b/posthog/hogql_queries/insights/trends/trends_query_builder.py @@ -294,14 +294,15 @@ def _get_events_subquery( return wrapper # Just complex series aggregation - elif ( - self._aggregation_operation.requires_query_orchestration() - and self._aggregation_operation.is_first_time_ever_math() + elif self._aggregation_operation.requires_query_orchestration() and ( + self._aggregation_operation.is_first_time_ever_math() + or self._aggregation_operation.is_first_matching_event() ): return self._aggregation_operation.get_first_time_math_query_orchestrator( events_where_clause=events_filter, sample_value=self._sample_value(), event_name_filter=self._event_or_action_where_expr(), + is_first_matching_event=self._aggregation_operation.is_first_matching_event(), ).build() elif 
self._aggregation_operation.requires_query_orchestration(): return self._aggregation_operation.get_actors_query_orchestrator( diff --git a/posthog/hogql_queries/insights/utils/aggregations.py b/posthog/hogql_queries/insights/utils/aggregations.py index cb4fd72377759e..bdf30f527d921c 100644 --- a/posthog/hogql_queries/insights/utils/aggregations.py +++ b/posthog/hogql_queries/insights/utils/aggregations.py @@ -68,20 +68,26 @@ def __init__( filters: ast.Expr | None = None, event_or_action_filter: ast.Expr | None = None, ratio: ast.RatioExpr | None = None, + is_first_matching_event: bool = False, ): - query.select = self._select_expr(date_from, filters) + query.select = self._select_expr(date_from, filters, is_first_matching_event) query.select_from = self._select_from_expr(ratio) query.where = self._where_expr(date_to, event_or_action_filter) query.group_by = self._group_by_expr() query.having = self._having_expr() super().__init__(query) - def _select_expr(self, date_from: ast.Expr, filters: ast.Expr | None = None): + def _select_expr(self, date_from: ast.Expr, filters: ast.Expr | None = None, is_first_matching_event: bool = False): aggregation_filters = date_from if filters is None else ast.And(exprs=[date_from, filters]) + min_timestamp_expr = ( + ast.Call(name="min", args=[ast.Field(chain=["timestamp"])]) + if not is_first_matching_event or filters is None + else ast.Call(name="minIf", args=[ast.Field(chain=["timestamp"]), filters]) + ) return [ ast.Alias( alias="min_timestamp", - expr=ast.Call(name="min", args=[ast.Field(chain=["timestamp"])]), + expr=min_timestamp_expr, ), ast.Alias( alias="min_timestamp_with_condition", diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py index a8cc01a0b89fb7..3439983291d68c 100644 --- a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py @@ -2,7 +2,9 @@ import json import re from enum import StrEnum -from typing import Any, Literal +from typing import Any, Literal, Optional, Union, cast + +from pydantic import Field from posthog.hogql_queries.legacy_compatibility.clean_properties import clean_entity_properties, clean_global_properties from posthog.models.entity.entity import Entity as LegacyEntity @@ -25,6 +27,7 @@ LifecycleQuery, PathsFilter, PathsQuery, + RetentionEntity, RetentionFilter, RetentionQuery, StickinessFilter, @@ -56,6 +59,10 @@ class MathAvailability(StrEnum): ] +def is_entity_variable(item: Any) -> bool: + return isinstance(item, str) and item.startswith("{") and item.endswith("}") + + def clean_display(display: str): if display not in [c.value for c in ChartDisplayType]: return None @@ -110,8 +117,16 @@ def transform_legacy_hidden_legend_keys(hidden_legend_keys): def legacy_entity_to_node( - entity: LegacyEntity, include_properties: bool, math_availability: MathAvailability -) -> EventsNode | ActionsNode | DataWarehouseNode: + entity: LegacyEntity | str, + include_properties: bool, + math_availability: MathAvailability, + allow_variables: bool = False, +) -> EventsNode | ActionsNode | DataWarehouseNode | str: + if allow_variables and is_entity_variable(entity): + return cast(str, entity) + + assert not isinstance(entity, str) + """ Takes a legacy entity and converts it into an EventsNode or ActionsNode. 
""" @@ -172,6 +187,7 @@ def exlusion_entity_to_node(entity) -> FunnelExclusionEventsNode | FunnelExclusi base_entity = legacy_entity_to_node( LegacyEntity(entity), include_properties=False, math_availability=MathAvailability.Unavailable ) + assert isinstance(base_entity, EventsNode | ActionsNode) if isinstance(base_entity, EventsNode): return FunnelExclusionEventsNode( **base_entity.model_dump(), @@ -187,7 +203,12 @@ def exlusion_entity_to_node(entity) -> FunnelExclusionEventsNode | FunnelExclusi # TODO: remove this method that returns legacy entities -def to_base_entity_dict(entity: dict): +def to_base_entity_dict(entity: dict | str): + if isinstance(entity, str): + if is_entity_variable(entity): + return entity + raise ValueError("Expecting valid entity or template variable") + return { "type": entity.get("type"), "id": entity.get("id"), @@ -206,6 +227,52 @@ def to_base_entity_dict(entity: dict): "STICKINESS": StickinessQuery, } + +class TrendsQueryWithTemplateVariables(TrendsQuery): + series: list[Union[EventsNode, ActionsNode, DataWarehouseNode, str]] = Field( # type: ignore + ..., description="Events and actions to include" + ) + + +class FunnelsQueryWithTemplateVariables(FunnelsQuery): + series: list[Union[EventsNode, ActionsNode, DataWarehouseNode, str]] = Field( # type: ignore + ..., description="Events and actions to include" + ) + + +class RetentionFilterWithTemplateVariables(RetentionFilter): + returningEntity: Optional[RetentionEntity | str] = None # type: ignore + targetEntity: Optional[RetentionEntity | str] = None # type: ignore + + +class RetentionQueryWithTemplateVariables(RetentionQuery): + retentionFilter: RetentionFilterWithTemplateVariables = Field( + ..., description="Properties specific to the retention insight" + ) + + +class LifecycleQueryWithTemplateVariables(LifecycleQuery): + series: list[Union[EventsNode, ActionsNode, DataWarehouseNode, str]] = Field( # type: ignore + ..., description="Events and actions to include" + ) + + +class StickinessQueryWithTemplateVariables(StickinessQuery): + series: list[Union[EventsNode, ActionsNode, DataWarehouseNode, str]] = Field( # type: ignore + ..., description="Events and actions to include" + ) + + +# +insight_to_query_type_with_variables = { + "TRENDS": TrendsQueryWithTemplateVariables, + "FUNNELS": FunnelsQueryWithTemplateVariables, + "RETENTION": RetentionQueryWithTemplateVariables, + "PATHS": PathsQuery, + "LIFECYCLE": LifecycleQueryWithTemplateVariables, + "STICKINESS": StickinessQueryWithTemplateVariables, +} + INSIGHT_TYPE = Literal["TRENDS", "FUNNELS", "RETENTION", "PATHS", "LIFECYCLE", "STICKINESS"] @@ -232,12 +299,12 @@ def _interval(filter: dict): return {"interval": filter.get("interval")} -def _series(filter: dict): +def _series(filter: dict, allow_variables: bool = False): if _insight_type(filter) == "RETENTION" or _insight_type(filter) == "PATHS": return {} # remove templates gone wrong - if filter.get("events") is not None: + if not allow_variables and filter.get("events") is not None: filter["events"] = [event for event in filter.get("events") if not (isinstance(event, str))] math_availability: MathAvailability = MathAvailability.Unavailable @@ -252,15 +319,16 @@ def _series(filter: dict): return { "series": [ - legacy_entity_to_node(entity, include_properties, math_availability) - for entity in _entities(filter) - if not (entity.type == "actions" and entity.id is None) + legacy_entity_to_node(entity, include_properties, math_availability, allow_variables) + for entity in _entities(filter, allow_variables) 
+ if isinstance(entity, str) or not (entity.type == "actions" and entity.id is None) ] } -def _entities(filter: dict): - processed_entities: list[LegacyEntity] = [] +def _entities(filter: dict, allow_variables: bool = False): + processed_entities: list[LegacyEntity | str] = [] + has_variables = False # add actions actions = filter.get("actions", []) @@ -272,7 +340,18 @@ def _entities(filter: dict): events = filter.get("events", []) if isinstance(events, str): events = json.loads(events) - processed_entities.extend([LegacyEntity({**entity, "type": "events"}) for entity in events]) + + def process_event(entity) -> LegacyEntity | str: + nonlocal has_variables + + # strings represent template variables, return them as-is + if allow_variables and isinstance(entity, str): + has_variables = True + return entity + else: + return LegacyEntity({**entity, "type": "events"}) + + processed_entities.extend([process_event(entity) for entity in events]) # add data warehouse warehouse = filter.get("data_warehouse", []) @@ -280,12 +359,13 @@ def _entities(filter: dict): warehouse = json.loads(warehouse) processed_entities.extend([LegacyEntity({**entity, "type": "data_warehouse"}) for entity in warehouse]) - # order by order - processed_entities.sort(key=lambda entity: entity.order if entity.order else -1) + if not has_variables: + # order by order + processed_entities.sort(key=lambda entity: entity.order if entity.order else -1) # type: ignore - # set sequential index values on entities - for index, entity in enumerate(processed_entities): - entity.index = index + # set sequential index values on entities + for index, entity in enumerate(processed_entities): + entity.index = index # type: ignore return processed_entities @@ -394,7 +474,7 @@ def _group_aggregation_filter(filter: dict): return {"aggregation_group_type_index": filter.get("aggregation_group_type_index")} -def _insight_filter(filter: dict): +def _insight_filter(filter: dict, allow_variables: bool = False): if _insight_type(filter) == "TRENDS": insight_filter = { "trendsFilter": TrendsFilter( @@ -440,18 +520,19 @@ def _insight_filter(filter: dict): ), } elif _insight_type(filter) == "RETENTION": + RetentionFilterClass = RetentionFilterWithTemplateVariables if allow_variables else RetentionFilter insight_filter = { - "retentionFilter": RetentionFilter( + "retentionFilter": RetentionFilterClass( retentionType=filter.get("retention_type"), retentionReference=filter.get("retention_reference"), totalIntervals=filter.get("total_intervals"), returningEntity=( - to_base_entity_dict(filter.get("returning_entity")) + to_base_entity_dict(filter.get("returning_entity")) # type: ignore if filter.get("returning_entity") is not None else None ), targetEntity=( - to_base_entity_dict(filter.get("target_entity")) + to_base_entity_dict(filter.get("target_entity")) # type: ignore if filter.get("target_entity") is not None else None ), @@ -526,22 +607,26 @@ def _insight_type(filter: dict) -> INSIGHT_TYPE: return filter.get("insight", "TRENDS") -def filter_to_query(filter: dict) -> InsightQueryNode: +def filter_to_query(filter: dict, allow_variables: bool = False) -> InsightQueryNode: filter = copy.deepcopy(filter) # duplicate to prevent accidental filter alterations - Query = insight_to_query_type[_insight_type(filter)] + Query = ( + insight_to_query_type_with_variables[_insight_type(filter)] + if allow_variables + else insight_to_query_type[_insight_type(filter)] + ) data = { **_date_range(filter), **_interval(filter), - **_series(filter), + **_series(filter, 
allow_variables), **_sampling_factor(filter), **_filter_test_accounts(filter), **_properties(filter), **_breakdown_filter(filter), **_compare_filter(filter), **_group_aggregation_filter(filter), - **_insight_filter(filter), + **_insight_filter(filter, allow_variables), } # :KLUDGE: We do this dance to have default values instead of None, when setting diff --git a/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py index b0e9e6a6c4ec95..69407b235c05b6 100644 --- a/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py @@ -1,3 +1,4 @@ +from typing import cast import pytest from posthog.hogql_queries.legacy_compatibility.filter_to_query import ( @@ -1820,3 +1821,37 @@ def test_ignores_digit_only_keys(self): indexes = hidden_legend_keys_to_breakdowns(hidden_legend_keys) self.assertEqual(indexes, ["Opera"]) + + +class TestDashboardTemplateConversion(BaseTest): + def test_trend_series_with_variables(self): + filter = { + "insight": "TRENDS", + "events": ["{VARIABLE}"], + } + + query = cast(TrendsQuery, filter_to_query(filter, allow_variables=True)) + + self.assertEqual(query.series, ["{VARIABLE}"]) + + def test_funnel_series_with_variables(self): + filter = { + "insight": "FUNNELS", + "events": ["{VARIABLE1}", "{VARIABLE2}"], + } + + query = cast(FunnelsQuery, filter_to_query(filter, allow_variables=True)) + + self.assertEqual(query.series, ["{VARIABLE1}", "{VARIABLE2}"]) + + def test_retention_entities_with_variables(self): + filter = { + "insight": "RETENTION", + "target_entity": "{VARIABLE1}", + "returning_entity": "{VARIABLE2}", + } + + query = cast(RetentionQuery, filter_to_query(filter, allow_variables=True)) + + self.assertEqual(query.retentionFilter.targetEntity, "{VARIABLE1}") + self.assertEqual(query.retentionFilter.returningEntity, "{VARIABLE2}") diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index bcc091e516203f..abcaddde2d3cfa 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -704,7 +704,7 @@ def to_actors_query(self, *args, **kwargs) -> ast.SelectQuery | ast.SelectSetQue # TODO: add support for selecting and filtering by breakdowns raise NotImplementedError() - def to_hogql(self) -> str: + def to_hogql(self, **kwargs) -> str: with self.timings.measure("to_hogql"): return print_ast( self.to_query(), @@ -715,6 +715,7 @@ def to_hogql(self) -> str: modifiers=self.modifiers, ), "hogql", + **kwargs, ) def get_cache_payload(self) -> dict: diff --git a/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr b/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr index a23c828bb8864a..20abb16f555ddc 100644 --- a/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr +++ b/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr @@ -1,4 +1,32 @@ # serializer version: 1 +# name: TestErrorTrackingQueryRunner.test_assignee_groups + ''' + SELECT count(DISTINCT events.uuid) AS occurrences, + count(DISTINCT events.`$session_id`) AS sessions, + count(DISTINCT events.distinct_id) AS users, + max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, + min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, + if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, 
accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id + FROM events + LEFT OUTER JOIN + (SELECT argMax(error_tracking_issue_fingerprint_overrides.issue_id, error_tracking_issue_fingerprint_overrides.version) AS issue_id, + error_tracking_issue_fingerprint_overrides.fingerprint AS fingerprint + FROM error_tracking_issue_fingerprint_overrides + WHERE equals(error_tracking_issue_fingerprint_overrides.team_id, 99999) + GROUP BY error_tracking_issue_fingerprint_overrides.fingerprint + HAVING ifNull(equals(argMax(error_tracking_issue_fingerprint_overrides.is_deleted, error_tracking_issue_fingerprint_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__exception_issue_override ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), events__exception_issue_override.fingerprint) + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID'))), 1) + GROUP BY if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- # name: TestErrorTrackingQueryRunner.test_column_names ''' SELECT count(DISTINCT events.uuid) AS occurrences, @@ -6,7 +34,7 @@ count(DISTINCT events.distinct_id) AS users, max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id FROM events LEFT OUTER JOIN (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, @@ -15,6 +43,13 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT OUTER JOIN + (SELECT argMax(error_tracking_issue_fingerprint_overrides.issue_id, error_tracking_issue_fingerprint_overrides.version) AS issue_id, + error_tracking_issue_fingerprint_overrides.fingerprint AS fingerprint + FROM error_tracking_issue_fingerprint_overrides + WHERE equals(error_tracking_issue_fingerprint_overrides.team_id, 99999) + GROUP BY error_tracking_issue_fingerprint_overrides.fingerprint + HAVING ifNull(equals(argMax(error_tracking_issue_fingerprint_overrides.is_deleted, error_tracking_issue_fingerprint_overrides.version), 0), 0) 
SETTINGS optimize_aggregation_in_order=1) AS events__exception_issue_override ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), events__exception_issue_override.fingerprint) LEFT JOIN (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email @@ -25,8 +60,8 @@ WHERE equals(person.team_id, 99999) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)) - GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID'))), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)) + GROUP BY if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, @@ -44,7 +79,7 @@ count(DISTINCT events.distinct_id) AS users, max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id FROM events LEFT OUTER JOIN (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, @@ -53,6 +88,13 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT OUTER JOIN + (SELECT argMax(error_tracking_issue_fingerprint_overrides.issue_id, error_tracking_issue_fingerprint_overrides.version) AS issue_id, + error_tracking_issue_fingerprint_overrides.fingerprint AS fingerprint + FROM error_tracking_issue_fingerprint_overrides + WHERE equals(error_tracking_issue_fingerprint_overrides.team_id, 99999) + GROUP BY error_tracking_issue_fingerprint_overrides.fingerprint + HAVING 
ifNull(equals(argMax(error_tracking_issue_fingerprint_overrides.is_deleted, error_tracking_issue_fingerprint_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__exception_issue_override ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), events__exception_issue_override.fingerprint) LEFT JOIN (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email @@ -63,8 +105,8 @@ WHERE equals(person.team_id, 99999) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), '01936e7f-d7ff-7314-b2d4-7627981e34f0'), 0)) - GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID'))), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1), ifNull(equals(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')), '01936e7f-d7ff-7314-b2d4-7627981e34f0'), 0)) + GROUP BY if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, @@ -82,7 +124,7 @@ count(DISTINCT events.distinct_id) AS users, max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id FROM events LEFT OUTER JOIN (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, @@ -91,6 +133,13 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 
0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT OUTER JOIN + (SELECT argMax(error_tracking_issue_fingerprint_overrides.issue_id, error_tracking_issue_fingerprint_overrides.version) AS issue_id, + error_tracking_issue_fingerprint_overrides.fingerprint AS fingerprint + FROM error_tracking_issue_fingerprint_overrides + WHERE equals(error_tracking_issue_fingerprint_overrides.team_id, 99999) + GROUP BY error_tracking_issue_fingerprint_overrides.fingerprint + HAVING ifNull(equals(argMax(error_tracking_issue_fingerprint_overrides.is_deleted, error_tracking_issue_fingerprint_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__exception_issue_override ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), events__exception_issue_override.fingerprint) LEFT JOIN (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email @@ -101,8 +150,8 @@ WHERE equals(person.team_id, 99999) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), ifNull(equals(events__person.properties___email, 'email@posthog.com'), 0)) - GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID'))), ifNull(equals(events__person.properties___email, 'email@posthog.com'), 0)) + GROUP BY if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, @@ -120,10 +169,17 @@ count(DISTINCT events.distinct_id) AS users, max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id FROM events - WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), 1, 
ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), '01936e7f-d7ff-7314-b2d4-7627981e34f0'), 0)) - GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + LEFT OUTER JOIN + (SELECT argMax(error_tracking_issue_fingerprint_overrides.issue_id, error_tracking_issue_fingerprint_overrides.version) AS issue_id, + error_tracking_issue_fingerprint_overrides.fingerprint AS fingerprint + FROM error_tracking_issue_fingerprint_overrides + WHERE equals(error_tracking_issue_fingerprint_overrides.team_id, 99999) + GROUP BY error_tracking_issue_fingerprint_overrides.fingerprint + HAVING ifNull(equals(argMax(error_tracking_issue_fingerprint_overrides.is_deleted, error_tracking_issue_fingerprint_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__exception_issue_override ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), events__exception_issue_override.fingerprint) + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID'))), 1, ifNull(equals(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')), '01936e7f-d7ff-7314-b2d4-7627981e34f0'), 0)) + GROUP BY if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, @@ -141,10 +197,17 @@ count(DISTINCT events.distinct_id) AS users, max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id FROM events - WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), 1) - GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + LEFT OUTER JOIN + (SELECT argMax(error_tracking_issue_fingerprint_overrides.issue_id, error_tracking_issue_fingerprint_overrides.version) AS issue_id, + error_tracking_issue_fingerprint_overrides.fingerprint AS fingerprint + FROM error_tracking_issue_fingerprint_overrides + WHERE equals(error_tracking_issue_fingerprint_overrides.team_id, 99999) + GROUP BY error_tracking_issue_fingerprint_overrides.fingerprint + HAVING ifNull(equals(argMax(error_tracking_issue_fingerprint_overrides.is_deleted, error_tracking_issue_fingerprint_overrides.version), 0), 0) 
SETTINGS optimize_aggregation_in_order=1) AS events__exception_issue_override ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), events__exception_issue_override.fingerprint) + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID'))), 1) + GROUP BY if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) ORDER BY last_seen DESC LIMIT 101 OFFSET 0 SETTINGS readonly=2, @@ -163,10 +226,17 @@ count(DISTINCT events.distinct_id) AS users, max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id FROM events - WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), 1) - GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + LEFT OUTER JOIN + (SELECT argMax(error_tracking_issue_fingerprint_overrides.issue_id, error_tracking_issue_fingerprint_overrides.version) AS issue_id, + error_tracking_issue_fingerprint_overrides.fingerprint AS fingerprint + FROM error_tracking_issue_fingerprint_overrides + WHERE equals(error_tracking_issue_fingerprint_overrides.team_id, 99999) + GROUP BY error_tracking_issue_fingerprint_overrides.fingerprint + HAVING ifNull(equals(argMax(error_tracking_issue_fingerprint_overrides.is_deleted, error_tracking_issue_fingerprint_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__exception_issue_override ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), events__exception_issue_override.fingerprint) + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID'))), 1) + GROUP BY if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) ORDER BY first_seen ASC LIMIT 101 OFFSET 0 SETTINGS readonly=2, @@ -185,7 +255,7 @@ count(DISTINCT events.distinct_id) AS users, max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 
'$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id FROM events LEFT OUTER JOIN (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, @@ -194,6 +264,13 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT OUTER JOIN + (SELECT argMax(error_tracking_issue_fingerprint_overrides.issue_id, error_tracking_issue_fingerprint_overrides.version) AS issue_id, + error_tracking_issue_fingerprint_overrides.fingerprint AS fingerprint + FROM error_tracking_issue_fingerprint_overrides + WHERE equals(error_tracking_issue_fingerprint_overrides.team_id, 99999) + GROUP BY error_tracking_issue_fingerprint_overrides.fingerprint + HAVING ifNull(equals(argMax(error_tracking_issue_fingerprint_overrides.is_deleted, error_tracking_issue_fingerprint_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__exception_issue_override ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), events__exception_issue_override.fingerprint) LEFT JOIN (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email @@ -204,8 +281,8 @@ WHERE equals(person.team_id, 99999) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), and(less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-01-11 00:00:00.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-01-10 00:00:00.000000', 6, 'UTC')), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0))) - GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(if(not(empty(events__exception_issue_override.issue_id)), 
events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID'))), and(less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-01-11 00:00:00.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-01-10 00:00:00.000000', 6, 'UTC')), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0))) + GROUP BY if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, @@ -223,7 +300,7 @@ count(DISTINCT events.distinct_id) AS users, max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) AS id FROM events LEFT OUTER JOIN (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, @@ -232,6 +309,13 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT OUTER JOIN + (SELECT argMax(error_tracking_issue_fingerprint_overrides.issue_id, error_tracking_issue_fingerprint_overrides.version) AS issue_id, + error_tracking_issue_fingerprint_overrides.fingerprint AS fingerprint + FROM error_tracking_issue_fingerprint_overrides + WHERE equals(error_tracking_issue_fingerprint_overrides.team_id, 99999) + GROUP BY error_tracking_issue_fingerprint_overrides.fingerprint + HAVING ifNull(equals(argMax(error_tracking_issue_fingerprint_overrides.is_deleted, error_tracking_issue_fingerprint_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__exception_issue_override ON equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), events__exception_issue_override.fingerprint) LEFT JOIN (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email @@ -242,8 +326,8 @@ WHERE equals(person.team_id, 99999) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 
'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1), and(or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0)), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0)))) - GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID'))), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1), and(or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0)), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0)))) + GROUP BY if(not(empty(events__exception_issue_override.issue_id)), events__exception_issue_override.issue_id, accurateCastOrNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 
'$exception_issue_id'), ''), 'null'), '^"|"$', ''), 'UUID')) LIMIT 101 OFFSET 0 SETTINGS readonly=2, max_execution_time=60, diff --git a/posthog/hogql_queries/test/test_error_tracking_query_runner.py b/posthog/hogql_queries/test/test_error_tracking_query_runner.py index 2b8bb4e78f83a4..6e153871626437 100644 --- a/posthog/hogql_queries/test/test_error_tracking_query_runner.py +++ b/posthog/hogql_queries/test/test_error_tracking_query_runner.py @@ -1,5 +1,7 @@ from unittest import TestCase from freezegun import freeze_time +from datetime import datetime +from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta from django.utils.timezone import now @@ -14,7 +16,13 @@ PersonPropertyFilter, PropertyOperator, ) -from posthog.models.error_tracking import ErrorTrackingIssue +from posthog.models.error_tracking import ( + ErrorTrackingIssue, + ErrorTrackingIssueFingerprintV2, + ErrorTrackingIssueAssignment, + update_error_tracking_issue_fingerprints, + override_error_tracking_issue_fingerprint, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -24,6 +32,7 @@ flush_persons_and_events, ) + SAMPLE_STACK_TRACE = [ { "abs_path": "/code/posthog/clickhouse/client/execute.py", @@ -186,12 +195,22 @@ class TestErrorTrackingQueryRunner(ClickhouseTestMixin, APIBaseTest): distinct_id_one = "user_1" distinct_id_two = "user_2" - issue_one = "01936e7f-d7ff-7314-b2d4-7627981e34f0" - issue_two = "01936e80-5e69-7e70-b837-871f5cdad28b" - issue_three = "01936e80-aa51-746f-aec4-cdf16a5c5332" + issue_id_one = "01936e7f-d7ff-7314-b2d4-7627981e34f0" + issue_id_two = "01936e80-5e69-7e70-b837-871f5cdad28b" + issue_id_three = "01936e80-aa51-746f-aec4-cdf16a5c5332" + issue_three_fingerprint = "issue_three_fingerprint" + + def override_fingerprint(self, fingerprint, issue_id, version=1): + update_error_tracking_issue_fingerprints(team_id=self.team.pk, issue_id=issue_id, fingerprints=[fingerprint]) + override_error_tracking_issue_fingerprint( + team_id=self.team.pk, fingerprint=fingerprint, issue_id=issue_id, version=version + ) - def create_events_and_issue(self, issue_id, distinct_ids, timestamp=None, exception_list=None): - event_properties = {"$exception_issue_id": issue_id} + def create_events_and_issue(self, issue_id, fingerprint, distinct_ids, timestamp=None, exception_list=None): + issue = ErrorTrackingIssue.objects.create(id=issue_id, team=self.team) + ErrorTrackingIssueFingerprintV2.objects.create(team=self.team, issue=issue, fingerprint=fingerprint) + + event_properties = {"$exception_issue_id": issue_id, "$exception_fingerprint": fingerprint} if exception_list: event_properties["$exception_list"] = exception_list @@ -204,8 +223,6 @@ def create_events_and_issue(self, issue_id, distinct_ids, timestamp=None, except timestamp=timestamp, ) - ErrorTrackingIssue.objects.create(id=issue_id, team=self.team) - def setUp(self): super().setUp() @@ -225,16 +242,23 @@ def setUp(self): is_identified=True, ) - self.create_events_and_issue( - issue_id=self.issue_one, + self.issue_one = self.create_events_and_issue( + issue_id=self.issue_id_one, + fingerprint="issue_one_fingerprint", distinct_ids=[self.distinct_id_one, self.distinct_id_two], timestamp=now() - relativedelta(hours=3), ) self.create_events_and_issue( - issue_id=self.issue_two, distinct_ids=[self.distinct_id_one], timestamp=now() - relativedelta(hours=2) + issue_id=self.issue_id_two, + fingerprint="issue_two_fingerprint", + distinct_ids=[self.distinct_id_one], + timestamp=now() - relativedelta(hours=2), ) 
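The snapshot above resolves each event's issue through the new error_tracking_issue_fingerprint_overrides join: argMax(issue_id, version) keeps the newest mapping per fingerprint, the HAVING clause discards fingerprints whose newest mapping is marked deleted, and the outer if(not(empty(...))) falls back to the event's own $exception_issue_id property when no live override exists. A minimal pure-Python sketch of that resolution order (the row shape is an assumption for illustration):

```python
# Rows model error_tracking_issue_fingerprint_overrides: (fingerprint, issue_id, version, is_deleted).
def resolve_issue_id(event_issue_id, event_fingerprint, override_rows):
    latest = {}  # argMax(issue_id, version): the highest version per fingerprint wins
    for fingerprint, issue_id, version, is_deleted in override_rows:
        if fingerprint not in latest or version > latest[fingerprint][0]:
            latest[fingerprint] = (version, issue_id, is_deleted)
    row = latest.get(event_fingerprint)
    # HAVING argMax(is_deleted, version) = 0: a deleted latest mapping is no override at all
    if row is not None and not row[2]:
        return row[1]
    # if(not(empty(override.issue_id)), ..., $exception_issue_id): fall back to the event property
    return event_issue_id

rows = [("fp", "issue-A", 1, False), ("fp", "issue-B", 2, False)]
assert resolve_issue_id("issue-orig", "fp", rows) == "issue-B"           # newest override wins
assert resolve_issue_id("issue-orig", "other-fp", rows) == "issue-orig"  # no override: property wins
```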
self.create_events_and_issue( - issue_id=self.issue_three, distinct_ids=[self.distinct_id_two], timestamp=now() - relativedelta(hours=1) + issue_id=self.issue_id_three, + fingerprint=self.issue_three_fingerprint, + distinct_ids=[self.distinct_id_two], + timestamp=now() - relativedelta(hours=1), ) flush_persons_and_events() @@ -271,7 +295,7 @@ def test_column_names(self): team=self.team, query=ErrorTrackingQuery( kind="ErrorTrackingQuery", - issueId=self.issue_one, + issueId=self.issue_id_one, dateRange=DateRange(), filterTestAccounts=True, ), @@ -296,7 +320,7 @@ def test_issue_grouping(self): team=self.team, query=ErrorTrackingQuery( kind="ErrorTrackingQuery", - issueId=self.issue_one, + issueId=self.issue_id_one, dateRange=DateRange(), ), ) @@ -304,7 +328,7 @@ def test_issue_grouping(self): results = self._calculate(runner)["results"] # returns a single group with multiple errors self.assertEqual(len(results), 1) - self.assertEqual(results[0]["id"], self.issue_one) + self.assertEqual(results[0]["id"], self.issue_id_one) self.assertEqual(results[0]["occurrences"], 2) @snapshot_clickhouse_queries @@ -312,16 +336,19 @@ def test_search_query(self): with freeze_time("2022-01-10 12:11:00"): self.create_events_and_issue( issue_id="01936e81-b0ce-7b56-8497-791e505b0d0c", + fingerprint="fingerprint_DatabaseNotFoundX", distinct_ids=[self.distinct_id_one], exception_list=[{"type": "DatabaseNotFoundX", "value": "this is the same error message"}], ) self.create_events_and_issue( issue_id="01936e81-f5ce-79b1-99f1-f0e9675fcfef", + fingerprint="fingerprint_DatabaseNotFoundY", distinct_ids=[self.distinct_id_one], exception_list=[{"type": "DatabaseNotFoundY", "value": "this is the same error message"}], ) self.create_events_and_issue( issue_id="01936e82-241e-7e27-b47d-6659c54eb0be", + fingerprint="fingerprint_xyz", distinct_ids=[self.distinct_id_two], exception_list=[{"type": "xyz", "value": "this is the same error message"}], ) @@ -372,6 +399,7 @@ def test_search_query_with_multiple_search_items(self): with freeze_time("2022-01-10 12:11:00"): self.create_events_and_issue( issue_id="01936e81-b0ce-7b56-8497-791e505b0d0c", + fingerprint="fingerprint_DatabaseNotFoundX", distinct_ids=[self.distinct_id_one], exception_list=[ { @@ -384,6 +412,7 @@ def test_search_query_with_multiple_search_items(self): self.create_events_and_issue( issue_id="01936e81-f5ce-79b1-99f1-f0e9675fcfef", + fingerprint="fingerprint_DatabaseNotFoundY", distinct_ids=[self.distinct_id_two], exception_list=[ { @@ -420,7 +449,7 @@ def test_only_returns_exception_events(self): distinct_id=self.distinct_id_one, event="$pageview", team=self.team, - properties={"$exception_issue_id": self.issue_one}, + properties={"$exception_issue_id": self.issue_id_one}, ) flush_persons_and_events() @@ -470,7 +499,7 @@ def test_ordering(self): ) results = self._calculate(runner)["results"] - self.assertEqual([r["id"] for r in results], [self.issue_three, self.issue_two, self.issue_one]) + self.assertEqual([r["id"] for r in results], [self.issue_id_three, self.issue_id_two, self.issue_id_one]) runner = ErrorTrackingQueryRunner( team=self.team, @@ -478,88 +507,72 @@ def test_ordering(self): ) results = self._calculate(runner)["results"] - self.assertEqual([r["id"] for r in results], [self.issue_one, self.issue_two, self.issue_three]) - - # def test_merges_and_defaults_groups(self): - # ErrorTrackingGroup.objects.create( - # team=self.team, - # fingerprint=["SyntaxError"], - # merged_fingerprints=[["custom_fingerprint"]], - # assignee=self.user, - # ) - - # runner 
= ErrorTrackingQueryRunner( - # team=self.team, - # query=ErrorTrackingQuery( - # kind="ErrorTrackingQuery", fingerprint=None, dateRange=DateRange(), order="occurrences" - # ), - # ) - - # results = self._calculate(runner)["results"] - # self.assertEqual( - # results, - # [ - # { - # "assignee": self.user.id, - # "description": "this is the same error message", - # "exception_type": "SyntaxError", - # "fingerprint": ["SyntaxError"], - # "first_seen": datetime(2020, 1, 10, 12, 11, tzinfo=ZoneInfo("UTC")), - # "last_seen": datetime(2020, 1, 10, 12, 11, tzinfo=ZoneInfo("UTC")), - # "merged_fingerprints": [["custom_fingerprint"]], - # # count is (2 x SyntaxError) + (1 x custom_fingerprint) - # "occurrences": 3, - # "sessions": 1, - # "users": 2, - # "volume": None, - # "status": ErrorTrackingGroup.Status.ACTIVE, - # }, - # { - # "assignee": None, - # "description": None, - # "exception_type": "TypeError", - # "fingerprint": ["TypeError"], - # "first_seen": datetime(2020, 1, 10, 12, 11, tzinfo=ZoneInfo("UTC")), - # "last_seen": datetime(2020, 1, 10, 12, 11, tzinfo=ZoneInfo("UTC")), - # "merged_fingerprints": [], - # "occurrences": 1, - # "sessions": 1, - # "users": 1, - # "volume": None, - # "status": ErrorTrackingGroup.Status.ACTIVE, - # }, - # ], - # ) - - # @snapshot_clickhouse_queries - # def test_assignee_groups(self): - # ErrorTrackingGroup.objects.create( - # team=self.team, - # fingerprint=["SyntaxError"], - # assignee=self.user, - # ) - # ErrorTrackingGroup.objects.create( - # team=self.team, - # fingerprint=["custom_fingerprint"], - # assignee=self.user, - # ) - # ErrorTrackingGroup.objects.create( - # team=self.team, - # fingerprint=["TypeError"], - # ) - - # runner = ErrorTrackingQueryRunner( - # team=self.team, - # query=ErrorTrackingQuery( - # kind="ErrorTrackingQuery", - # dateRange=DateRange(), - # assignee=self.user.pk, - # ), - # ) - - # results = self._calculate(runner)["results"] - - # self.assertEqual(sorted([x["fingerprint"] for x in results]), [["SyntaxError"], ["custom_fingerprint"]]) + self.assertEqual([r["id"] for r in results], [self.issue_id_one, self.issue_id_two, self.issue_id_three]) + + def test_overrides_aggregation(self): + self.override_fingerprint(self.issue_three_fingerprint, self.issue_id_one) + + runner = ErrorTrackingQueryRunner( + team=self.team, + query=ErrorTrackingQuery(kind="ErrorTrackingQuery", dateRange=DateRange(), orderBy="occurrences"), + ) + + results = self._calculate(runner)["results"] + self.assertEqual( + results, + [ + { + "id": self.issue_id_one, + "name": None, + "description": None, + "assignee": None, + "volume": None, + "status": ErrorTrackingIssue.Status.ACTIVE, + "first_seen": datetime(2020, 1, 10, 9, 11, tzinfo=ZoneInfo("UTC")), + "last_seen": datetime(2020, 1, 10, 11, 11, tzinfo=ZoneInfo("UTC")), + # count is (2 x issue_one) + (1 x issue_three) + "occurrences": 3, + "sessions": 1, + "users": 2, + }, + { + "id": self.issue_id_two, + "name": None, + "description": None, + "assignee": None, + "volume": None, + "status": ErrorTrackingIssue.Status.ACTIVE, + "first_seen": datetime(2020, 1, 10, 10, 11, tzinfo=ZoneInfo("UTC")), + "last_seen": datetime(2020, 1, 10, 10, 11, tzinfo=ZoneInfo("UTC")), + "occurrences": 1, + "sessions": 1, + "users": 1, + }, + ], + ) + + @snapshot_clickhouse_queries + def test_assignee_groups(self): + issue_id = "e9ac529f-ac1c-4a96-bd3a-107034368d64" + self.create_events_and_issue( + issue_id=issue_id, + fingerprint="assigned_issue_fingerprint", + distinct_ids=[self.distinct_id_one], + ) + 
flush_persons_and_events() + ErrorTrackingIssueAssignment.objects.create(issue_id=issue_id, user=self.user) + + runner = ErrorTrackingQueryRunner( + team=self.team, + query=ErrorTrackingQuery( + kind="ErrorTrackingQuery", + dateRange=DateRange(), + assignee=self.user.pk, + ), + ) + + results = self._calculate(runner)["results"] + self.assertEqual([x["id"] for x in results], [issue_id]) class TestSearchTokenizer(TestCase): diff --git a/posthog/hogql_queries/web_analytics/stats_table.py b/posthog/hogql_queries/web_analytics/stats_table.py index 1633c4389879d7..a85e1a47dc7b30 100644 --- a/posthog/hogql_queries/web_analytics/stats_table.py +++ b/posthog/hogql_queries/web_analytics/stats_table.py @@ -41,7 +41,9 @@ def __init__(self, *args, **kwargs): def to_query(self) -> ast.SelectQuery: if self.query.breakdownBy == WebStatsBreakdown.PAGE: - if self.query.includeScrollDepth and self.query.includeBounceRate: + if self.query.conversionGoal: + return self.to_main_query(self._counts_breakdown_value()) + elif self.query.includeScrollDepth and self.query.includeBounceRate: return self.to_path_scroll_bounce_query() elif self.query.includeBounceRate: return self.to_path_bounce_query() @@ -50,190 +52,77 @@ def to_query(self) -> ast.SelectQuery: if self.query.includeBounceRate: return self.to_entry_bounce_query() - if self._has_session_properties(): - return self._to_main_query_with_session_properties() + return self.to_main_query(self._counts_breakdown_value()) - return self.to_main_query() - - def to_main_query(self) -> ast.SelectQuery: + def to_main_query(self, breakdown) -> ast.SelectQuery: with self.timings.measure("stats_table_query"): - query = parse_select( - """ -WITH - start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment, - start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment -SELECT - {processed_breakdown_value} AS "context.columns.breakdown_value", - tuple( - uniqIf(filtered_person_id, current_period_segment), - uniqIf(filtered_person_id, previous_period_segment) - ) AS "context.columns.visitors", - tuple( - sumIf(filtered_pageview_count, current_period_segment), - sumIf(filtered_pageview_count, previous_period_segment) - ) AS "context.columns.views" -FROM ( - SELECT - any(person_id) AS filtered_person_id, - count() AS filtered_pageview_count, - {breakdown_value} AS breakdown_value, - min(session.$start_timestamp) as start_timestamp - FROM events - WHERE and( - timestamp >= {date_from_previous_period}, - timestamp < {date_to}, - events.event == '$pageview', - {all_properties}, - {where_breakdown} - ) - GROUP BY events.`$session_id`, breakdown_value -) -GROUP BY "context.columns.breakdown_value" -ORDER BY "context.columns.visitors" DESC, -"context.columns.views" DESC, -"context.columns.breakdown_value" ASC -""", - timings=self.timings, - placeholders={ - "breakdown_value": self._counts_breakdown_value(), - "processed_breakdown_value": self._processed_breakdown_value(), - "where_breakdown": self.where_breakdown(), - "all_properties": self._all_properties(), - "date_from_previous_period": self._date_from_previous_period(), - "date_from": self._date_from(), - "date_to": self._date_to(), - }, - ) - - assert isinstance(query, ast.SelectQuery) - - if self._include_extra_aggregation_value(): - query.select.append(self._extra_aggregation_value()) - - return query + # Base selects, always returns the breakdown value, and the total number of visitors + selects = [ + 
ast.Alias(alias="context.columns.breakdown_value", expr=self._processed_breakdown_value()), + self._period_comparison_tuple("filtered_person_id", "context.columns.visitors", "uniq"), + ] + + if self.query.conversionGoal is not None: + selects.extend( + [ + self._period_comparison_tuple("conversion_count", "context.columns.total_conversions", "sum"), + self._period_comparison_tuple( + "conversion_person_id", "context.columns.unique_conversions", "uniq" + ), + ast.Alias( + alias="context.columns.conversion_rate", + expr=ast.Tuple( + exprs=[ + parse_expr( + "if(`context.columns.visitors`.1 = 0, NULL, `context.columns.unique_conversions`.1 / `context.columns.visitors`.1)" + ), + parse_expr( + "if(`context.columns.visitors`.2 = 0, NULL, `context.columns.unique_conversions`.2 / `context.columns.visitors`.2)" + ), + ] + ), + ), + ] + ) + else: + selects.append( + self._period_comparison_tuple("filtered_pageview_count", "context.columns.views", "sum"), + ) - def _to_main_query_with_session_properties(self) -> ast.SelectQuery: - with self.timings.measure("stats_table_query"): - query = parse_select( - """ -WITH - start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment, - start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment -SELECT - {processed_breakdown_value} AS "context.columns.breakdown_value", - tuple( - uniqIf(filtered_person_id, current_period_segment), - uniqIf(filtered_person_id, previous_period_segment) - ) AS "context.columns.visitors", - tuple( - sumIf(filtered_pageview_count, current_period_segment), - sumIf(filtered_pageview_count, previous_period_segment) - ) AS "context.columns.views" -FROM ( - SELECT - any(person_id) AS filtered_person_id, - count() AS filtered_pageview_count, - {breakdown_value} AS breakdown_value, - session.session_id AS session_id, - min(session.$start_timestamp) as start_timestamp - FROM events - WHERE and( - timestamp >= {date_from_previous_period}, - timestamp < {date_to}, - events.event == '$pageview', - {event_properties}, - {session_properties}, - {where_breakdown} - ) - GROUP BY session_id, breakdown_value -) -GROUP BY "context.columns.breakdown_value" -ORDER BY "context.columns.visitors" DESC, -"context.columns.views" DESC, -"context.columns.breakdown_value" ASC -""", - timings=self.timings, - placeholders={ - "breakdown_value": self._counts_breakdown_value(), - "processed_breakdown_value": self._processed_breakdown_value(), - "where_breakdown": self.where_breakdown(), - "event_properties": self._event_properties(), - "session_properties": self._session_properties(), - "date_from_previous_period": self._date_from_previous_period(), - "date_from": self._date_from(), - "date_to": self._date_to(), - }, + if self._include_extra_aggregation_value(): + selects.append(self._extra_aggregation_value()) + + query = ast.SelectQuery( + select=selects, + select_from=ast.JoinExpr(table=self._main_inner_query(breakdown)), + group_by=[ast.Field(chain=["context.columns.breakdown_value"])], + order_by=[ + ast.OrderExpr(expr=ast.Field(chain=["context.columns.visitors"]), order="DESC"), + ast.OrderExpr( + expr=ast.Field( + chain=[ + "context.columns.views" + if self.query.conversionGoal is None + else "context.columns.total_conversions" + ] + ), + order="DESC", + ), + ast.OrderExpr(expr=ast.Field(chain=["context.columns.breakdown_value"]), order="ASC"), + ], ) - assert isinstance(query, ast.SelectQuery) - - if self.query.breakdownBy == WebStatsBreakdown.LANGUAGE: - 
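The context.columns.conversion_rate column divides unique conversions by visitors separately for each period and returns NULL rather than dividing by zero, so a period with no visitors stays distinguishable from one with visitors but no conversions. The same guard in plain Python (a sketch, not the HogQL expression itself):

```python
from typing import Optional

def conversion_rate(unique_conversions: int, visitors: int) -> Optional[float]:
    # Mirrors if(`context.columns.visitors` = 0, NULL, unique_conversions / visitors)
    return None if visitors == 0 else unique_conversions / visitors

# One tuple per column: (current period, previous period)
rate = (conversion_rate(1, 1), conversion_rate(0, 0))
assert rate == (1.0, None)  # matches the (1, None) expectations in the tests further down
```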
query.select.append(self._extra_aggregation_value()) return query def to_entry_bounce_query(self) -> ast.SelectQuery: - with self.timings.measure("stats_table_query"): - query = parse_select( - """ -WITH - start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment, - start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment -SELECT - breakdown_value AS "context.columns.breakdown_value", - tuple( - uniqIf(filtered_person_id, current_period_segment), - uniqIf(filtered_person_id, previous_period_segment) - ) AS "context.columns.visitors", - tuple( - sumIf(filtered_pageview_count, current_period_segment), - sumIf(filtered_pageview_count, previous_period_segment) - ) AS "context.columns.views", - tuple( - avgIf(is_bounce, current_period_segment), - avgIf(is_bounce, previous_period_segment) - ) AS "context.columns.bounce_rate", -FROM ( - SELECT - {bounce_breakdown} AS breakdown_value, - any(person_id) AS filtered_person_id, - count() AS filtered_pageview_count, - any(session.$is_bounce) AS is_bounce, - session.session_id AS session_id, - min(session.$start_timestamp) as start_timestamp - FROM events - WHERE and( - timestamp >= {date_from_previous_period}, - timestamp < {date_to}, - events.event == '$pageview', - {event_properties}, - {session_properties}, - {where_breakdown} - ) - GROUP BY session_id, breakdown_value -) -GROUP BY "context.columns.breakdown_value" -ORDER BY "context.columns.visitors" DESC, -"context.columns.views" DESC, -"context.columns.breakdown_value" ASC -""", - timings=self.timings, - placeholders={ - "bounce_breakdown": self._bounce_entry_pathname_breakdown(), - "where_breakdown": self.where_breakdown(), - "session_properties": self._session_properties(), - "event_properties": self._event_properties(), - "date_from_previous_period": self._date_from_previous_period(), - "date_from": self._date_from(), - "date_to": self._date_to(), - }, - ) - assert isinstance(query, ast.SelectQuery) + query = self.to_main_query(self._bounce_entry_pathname_breakdown()) + + if self.query.conversionGoal is None: + query.select.append(self._period_comparison_tuple("is_bounce", "context.columns.bounce_rate", "avg")) + return query def to_path_scroll_bounce_query(self) -> ast.SelectQuery: - if self.query.breakdownBy != WebStatsBreakdown.PAGE: - raise NotImplementedError("Scroll depth is only supported for page breakdowns") - with self.timings.measure("stats_table_bounce_query"): query = parse_select( """ @@ -438,6 +327,56 @@ def to_path_bounce_query(self) -> ast.SelectQuery: assert isinstance(query, ast.SelectQuery) return query + def _main_inner_query(self, breakdown): + query = parse_select( + """ +SELECT + any(person_id) AS filtered_person_id, + count() AS filtered_pageview_count, + {breakdown_value} AS breakdown_value, + session.session_id AS session_id, + any(session.$is_bounce) AS is_bounce, + min(session.$start_timestamp) as start_timestamp +FROM events +WHERE and(timestamp >= {date_from}, timestamp < {date_to}, {event_where}, {all_properties}, {where_breakdown}) +GROUP BY session_id, breakdown_value +""", + timings=self.timings, + placeholders={ + "breakdown_value": breakdown, + "date_from": self._date_from_previous_period(), + "date_to": self._date_to(), + "event_where": self.event_type_expr, + "all_properties": self._all_properties(), + "where_breakdown": self.where_breakdown(), + }, + ) + + assert isinstance(query, ast.SelectQuery) + + if self.conversion_count_expr and self.conversion_person_id_expr: + 
query.select.append(ast.Alias(alias="conversion_count", expr=self.conversion_count_expr)) + query.select.append(ast.Alias(alias="conversion_person_id", expr=self.conversion_person_id_expr)) + + return query + + def _period_comparison_tuple(self, column, alias, function_name): + return ast.Alias( + alias=alias, + expr=ast.Tuple( + exprs=[ + self._current_period_aggregate(function_name, column), + self._previous_period_aggregate(function_name, column), + ] + ), + ) + + def _current_period_aggregate(self, function_name, column_name): + return self.period_aggregate(function_name, column_name, self._date_from(), self._date_to()) + + def _previous_period_aggregate(self, function_name, column_name): + return self.period_aggregate(function_name, column_name, self._date_from_previous_period(), self._date_from()) + def _event_properties(self) -> ast.Expr: properties = [ p for p in self.query.properties + self._test_account_filters if get_property_type(p) in ["event", "person"] @@ -461,22 +400,6 @@ def map_scroll_property(property: Union[EventPropertyFilter, PersonPropertyFilte ] return property_to_expr(properties, team=self.team, scope="event") - def _has_session_properties(self) -> bool: - return any( - get_property_type(p) == "session" for p in self.query.properties + self._test_account_filters - ) or self.query.breakdownBy in { - WebStatsBreakdown.INITIAL_CHANNEL_TYPE, - WebStatsBreakdown.INITIAL_REFERRING_DOMAIN, - WebStatsBreakdown.INITIAL_UTM_SOURCE, - WebStatsBreakdown.INITIAL_UTM_CAMPAIGN, - WebStatsBreakdown.INITIAL_UTM_MEDIUM, - WebStatsBreakdown.INITIAL_UTM_TERM, - WebStatsBreakdown.INITIAL_UTM_CONTENT, - WebStatsBreakdown.INITIAL_PAGE, - WebStatsBreakdown.EXIT_PAGE, - WebStatsBreakdown.INITIAL_UTM_SOURCE_MEDIUM_CAMPAIGN, - } - def _session_properties(self) -> ast.Expr: properties = [ p for p in self.query.properties + self._test_account_filters if get_property_type(p) == "session" @@ -513,11 +436,14 @@ def calculate(self): results, { 0: self._join_with_aggregation_value, # breakdown_value - 1: lambda tuple, row: (self._unsample(tuple[0], row), self._unsample(tuple[1], row)), # Views (tuple) - 2: lambda tuple, row: ( + 1: lambda tuple, row: ( # Views (tuple) + self._unsample(tuple[0], row), + self._unsample(tuple[1], row), + ), + 2: lambda tuple, row: ( # Visitors (tuple) self._unsample(tuple[0], row), self._unsample(tuple[1], row), - ), # Visitors (tuple) + ), }, ) @@ -525,9 +451,9 @@ def calculate(self): if self.query.breakdownBy == WebStatsBreakdown.LANGUAGE: # Keep only first 3 columns, we don't need the aggregation value in the frontend - results_mapped = [[column for idx, column in enumerate(row) if idx < 3] for row in results_mapped] + # Remove both the value and the column (used to generate table headers) + results_mapped = [row[:3] for row in results_mapped] - # Remove this before returning it to the frontend columns = ( [column for column in response.columns if column != "context.columns.aggregation_value"] if response.columns is not None diff --git a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py index ae4b48b0632c16..f59c95931dd07e 100644 --- a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py +++ b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py @@ -3,7 +3,7 @@ from freezegun import freeze_time from posthog.hogql_queries.web_analytics.stats_table import WebStatsTableQueryRunner -from posthog.models import Cohort +from posthog.models import Action, Cohort, 
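Every column is now a (current, previous) pair computed in a single pass: the inner query spans the whole window from the previous period's start to date_to, and _period_comparison_tuple wraps one aggregate in a pair of conditional aggregates (uniqIf/sumIf/avgIf) via period_aggregate. Roughly, in plain Python (row shape and helper names are assumptions for illustration):

```python
from datetime import date

def period_comparison_tuple(rows, value, agg, prev_from, current_from, date_to):
    # One scan over rows spanning [prev_from, date_to); emulates the uniqIf/sumIf pairs.
    current = agg(value(r) for r in rows if current_from <= r["start"] < date_to)
    previous = agg(value(r) for r in rows if prev_from <= r["start"] < current_from)
    return (current, previous)

rows = [
    {"start": date(2023, 12, 2), "person": "p1"},
    {"start": date(2023, 11, 29), "person": "p2"},
]
visitors = period_comparison_tuple(
    rows, lambda r: r["person"], lambda it: len(set(it)),  # len(set(...)) plays the role of uniq
    date(2023, 11, 28), date(2023, 12, 1), date(2023, 12, 4),
)
assert visitors == (1, 1)
```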
Element from posthog.models.utils import uuid7 from posthog.schema import ( DateRange, @@ -13,6 +13,8 @@ PropertyOperator, SessionTableVersion, HogQLQueryModifiers, + CustomEventConversionGoal, + ActionConversionGoal, ) from posthog.test.base import ( APIBaseTest, @@ -38,13 +40,27 @@ def _create_events(self, data, event="$pageview"): }, ) ) - for timestamp, session_id, pathname in timestamps: + for timestamp, session_id, *extra in timestamps: + url = None + elements = None + if event == "$pageview": + url = extra[0] if extra else None + elif event == "$autocapture": + elements = extra[0] if extra else None + properties = extra[1] if extra and len(extra) > 1 else {} + _create_event( team=self.team, event=event, distinct_id=id, timestamp=timestamp, - properties={"$session_id": session_id, "$pathname": pathname}, + properties={ + "$session_id": session_id, + "$pathname": url, + "$current_url": url, + **properties, + }, + elements=elements, ) return person_result @@ -107,6 +123,8 @@ def _run_web_stats_table_query( include_bounce_rate=False, include_scroll_depth=False, properties=None, + action: Optional[Action] = None, + custom_event: Optional[str] = None, session_table_version: SessionTableVersion = SessionTableVersion.V2, filter_test_accounts: Optional[bool] = False, ): @@ -119,6 +137,11 @@ def _run_web_stats_table_query( doPathCleaning=bool(path_cleaning_filters), includeBounceRate=include_bounce_rate, includeScrollDepth=include_scroll_depth, + conversionGoal=ActionConversionGoal(actionId=action.id) + if action + else CustomEventConversionGoal(customEventName=custom_event) + if custom_event + else None, filterTestAccounts=filter_test_accounts, ) self.team.path_cleaning_filters = path_cleaning_filters or [] @@ -1255,3 +1278,213 @@ def test_timezone_filter_with_empty_timezone(self): # Don't crash, treat all of them null assert results == [] + + def test_conversion_goal_no_conversions(self): + s1 = str(uuid7("2023-12-01")) + self._create_events( + [ + ("p1", [("2023-12-01", s1, "https://www.example.com/foo")]), + ] + ) + + action = Action.objects.create( + team=self.team, + name="Visited Bar", + steps_json=[{"event": "$pageview", "url": "https://www.example.com/bar", "url_matching": "regex"}], + ) + + response = self._run_web_stats_table_query( + "2023-12-01", "2023-12-03", breakdown_by=WebStatsBreakdown.PAGE, action=action + ) + + assert [["https://www.example.com/foo", (1, 0), (0, 0), (0, 0), (0, None)]] == response.results + assert [ + "context.columns.breakdown_value", + "context.columns.visitors", + "context.columns.total_conversions", + "context.columns.unique_conversions", + "context.columns.conversion_rate", + ] == response.columns + + def test_conversion_goal_one_pageview_conversion(self): + s1 = str(uuid7("2023-12-01")) + self._create_events( + [ + ("p1", [("2023-12-01", s1, "https://www.example.com/foo")]), + ] + ) + + action = Action.objects.create( + team=self.team, + name="Visited Foo", + steps_json=[ + { + "event": "$pageview", + "url": "https://www.example.com/foo", + "url_matching": "regex", + } + ], + ) + + response = self._run_web_stats_table_query( + "2023-12-01", "2023-12-03", breakdown_by=WebStatsBreakdown.PAGE, action=action + ) + + assert [["https://www.example.com/foo", (1, 0), (1, 0), (1, 0), (1, None)]] == response.results + assert [ + "context.columns.breakdown_value", + "context.columns.visitors", +
"context.columns.total_conversions", + "context.columns.unique_conversions", + "context.columns.conversion_rate", + ] == response.columns + + def test_conversion_goal_one_custom_event_conversion(self): + s1 = str(uuid7("2023-12-01")) + self._create_events( + [ + ("p1", [("2023-12-01", s1, "https://www.example.com/foo")]), + ], + event="custom_event", + ) + + response = self._run_web_stats_table_query( + "2023-12-01", + "2023-12-03", + breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE, # Allow the breakdown value to be non-null + custom_event="custom_event", + ) + + assert [[None, (1, 0), (1, 0), (1, 0), (1, None)]] == response.results + assert [ + "context.columns.breakdown_value", + "context.columns.visitors", + "context.columns.total_conversions", + "context.columns.unique_conversions", + "context.columns.conversion_rate", + ] == response.columns + + def test_conversion_goal_one_custom_action_conversion(self): + s1 = str(uuid7("2023-12-01")) + self._create_events( + [ + ("p1", [("2023-12-01", s1)]), + ], + event="custom_event", + ) + + action = Action.objects.create( + team=self.team, + name="Did Custom Event", + steps_json=[ + { + "event": "custom_event", + } + ], + ) + + response = self._run_web_stats_table_query( + "2023-12-01", + "2023-12-03", + breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE, # Allow the breakdown value to be non-null + action=action, + ) + + assert [[None, (1, 0), (1, 0), (1, 0), (1, None)]] == response.results + assert [ + "context.columns.breakdown_value", + "context.columns.visitors", + "context.columns.total_conversions", + "context.columns.unique_conversions", + "context.columns.conversion_rate", + ] == response.columns + + def test_conversion_goal_one_autocapture_conversion(self): + s1 = str(uuid7("2023-12-01")) + self._create_events( + [ + ("p1", [("2023-12-01", s1, [Element(nth_of_type=1, nth_child=0, tag_name="button", text="Pay $10")])]), + ], + event="$autocapture", + ) + + action = Action.objects.create( + team=self.team, + name="Paid $10", + steps_json=[ + { + "event": "$autocapture", + "tag_name": "button", + "text": "Pay $10", + } + ], + ) + + response = self._run_web_stats_table_query( + "2023-12-01", + "2023-12-03", + breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE, # Allow the breakdown value to be non-null + action=action, + ) + + assert [[None, (1, 0), (1, 0), (1, 0), (1, None)]] == response.results + assert [ + "context.columns.breakdown_value", + "context.columns.visitors", + "context.columns.total_conversions", + "context.columns.unique_conversions", + "context.columns.conversion_rate", + ] == response.columns + + def test_conversion_rate(self): + s1 = str(uuid7("2023-12-01")) + s2 = str(uuid7("2023-12-01")) + s3 = str(uuid7("2023-12-01")) + + self._create_events( + [ + ( + "p1", + [ + ("2023-12-01", s1, "https://www.example.com/foo"), + ("2023-12-01", s1, "https://www.example.com/foo"), + ], + ), + ( + "p2", + [ + ("2023-12-01", s2, "https://www.example.com/foo"), + ("2023-12-01", s2, "https://www.example.com/bar"), + ], + ), + ("p3", [("2023-12-01", s3, "https://www.example.com/bar")]), + ] + ) + + action = Action.objects.create( + team=self.team, + name="Visited Foo", + steps_json=[ + { + "event": "$pageview", + "url": "https://www.example.com/foo", + "url_matching": "regex", + } + ], + ) + + response = self._run_web_stats_table_query( + "2023-12-01", "2023-12-03", breakdown_by=WebStatsBreakdown.PAGE, action=action + ) + + assert [ + ["https://www.example.com/foo", (2, 0), (3, 0), (2, 0), (1, None)], + ["https://www.example.com/bar", 
(2, 0), (0, 0), (0, 0), (0, None)], + ] == response.results + assert [ + "context.columns.breakdown_value", + "context.columns.visitors", + "context.columns.total_conversions", + "context.columns.unique_conversions", + "context.columns.conversion_rate", + ] == response.columns diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py index b73772ef79a909..016e1e50e8dada 100644 --- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py @@ -11,12 +11,15 @@ from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL from posthog.hogql import ast from posthog.hogql.parser import parse_expr, parse_select -from posthog.hogql.property import property_to_expr +from posthog.hogql.property import property_to_expr, action_to_expr from posthog.hogql.query import execute_hogql_query from posthog.hogql_queries.query_runner import QueryRunner from posthog.hogql_queries.utils.query_date_range import QueryDateRange +from posthog.models import Action from posthog.models.filters.mixins.utils import cached_property from posthog.schema import ( + ActionConversionGoal, + CustomEventConversionGoal, EventPropertyFilter, WebOverviewQuery, WebStatsTableQuery, @@ -57,6 +60,57 @@ def property_filters_without_pathname( ) -> list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]: return [p for p in self.query.properties if p.key != "$pathname"] + @cached_property + def conversion_goal_expr(self) -> Optional[ast.Expr]: + if isinstance(self.query.conversionGoal, ActionConversionGoal): + action = Action.objects.get(pk=self.query.conversionGoal.actionId, team__project_id=self.team.project_id) + return action_to_expr(action) + elif isinstance(self.query.conversionGoal, CustomEventConversionGoal): + return ast.CompareOperation( + left=ast.Field(chain=["events", "event"]), + op=ast.CompareOperationOp.Eq, + right=ast.Constant(value=self.query.conversionGoal.customEventName), + ) + else: + return None + + @cached_property + def conversion_count_expr(self) -> Optional[ast.Expr]: + if self.conversion_goal_expr: + return ast.Call(name="countIf", args=[self.conversion_goal_expr]) + else: + return None + + @cached_property + def conversion_person_id_expr(self) -> Optional[ast.Expr]: + if self.conversion_goal_expr: + return ast.Call( + name="any", + args=[ + ast.Call( + name="if", + args=[ + self.conversion_goal_expr, + ast.Field(chain=["events", "person_id"]), + ast.Constant(value=None), + ], + ) + ], + ) + else: + return None + + @cached_property + def event_type_expr(self) -> ast.Expr: + pageview_expr = ast.CompareOperation( + op=ast.CompareOperationOp.Eq, left=ast.Field(chain=["event"]), right=ast.Constant(value="$pageview") + ) + + if self.conversion_goal_expr: + return ast.Call(name="or", args=[pageview_expr, self.conversion_goal_expr]) + else: + return pageview_expr + def period_aggregate(self, function_name, column_name, start, end, alias=None, params=None): expr = ast.Call( name=function_name + "If", diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py index 2a41455a6ad293..62149a1eb7289a 100644 --- a/posthog/hogql_queries/web_analytics/web_overview.py +++ b/posthog/hogql_queries/web_analytics/web_overview.py @@ -5,20 +5,17 @@ from posthog.hogql import ast from posthog.hogql.parser import parse_select -from 
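Moving these expressions onto the shared web analytics runner means web_overview and stats_table now build them the same way: an ActionConversionGoal goes through action_to_expr, a CustomEventConversionGoal becomes an event-name equality, and with no goal the conversion expressions are None while event_type_expr collapses back to plain $pageview filtering. A rough Python analogue of the dispatch (the predicates are simplified stand-ins, not action_to_expr's real matching rules):

```python
def conversion_goal_predicate(goal):
    # None -> no conversion columns at all; the two dict shapes mirror the two goal types
    if goal is None:
        return None
    if goal["kind"] == "action":
        # stand-in for action_to_expr: real actions match on event, URL regex, elements, etc.
        return lambda e: e["event"] == "$pageview" and goal["url"] in e.get("$current_url", "")
    return lambda e: e["event"] == goal["customEventName"]

def event_filter(goal):
    # Mirrors event_type_expr: pageviews alone, or pageviews OR conversion events,
    # so conversions that are not pageviews still enter the inner query.
    conv = conversion_goal_predicate(goal)
    if conv is None:
        return lambda e: e["event"] == "$pageview"
    return lambda e: e["event"] == "$pageview" or conv(e)

keep = event_filter({"kind": "custom", "customEventName": "custom_event"})
assert keep({"event": "custom_event"}) and keep({"event": "$pageview"})
assert not keep({"event": "$pageleave"})
```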
posthog.hogql.property import property_to_expr, get_property_type, action_to_expr +from posthog.hogql.property import property_to_expr, get_property_type from posthog.hogql.query import execute_hogql_query from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.hogql_queries.web_analytics.web_analytics_query_runner import ( WebAnalyticsQueryRunner, ) -from posthog.models import Action from posthog.models.filters.mixins.utils import cached_property from posthog.schema import ( CachedWebOverviewQueryResponse, WebOverviewQueryResponse, WebOverviewQuery, - ActionConversionGoal, - CustomEventConversionGoal, SessionTableVersion, ) @@ -97,39 +94,6 @@ def session_properties(self) -> ast.Expr: ] return property_to_expr(properties, team=self.team, scope="event") - @cached_property - def conversion_goal_expr(self) -> Optional[ast.Expr]: - if isinstance(self.query.conversionGoal, ActionConversionGoal): - action = Action.objects.get(pk=self.query.conversionGoal.actionId, team__project_id=self.team.project_id) - return action_to_expr(action) - elif isinstance(self.query.conversionGoal, CustomEventConversionGoal): - return ast.CompareOperation( - left=ast.Field(chain=["events", "event"]), - op=ast.CompareOperationOp.Eq, - right=ast.Constant(value=self.query.conversionGoal.customEventName), - ) - else: - return None - - @cached_property - def conversion_person_id_expr(self) -> Optional[ast.Expr]: - if self.conversion_goal_expr: - return ast.Call( - name="any", - args=[ - ast.Call( - name="if", - args=[ - self.conversion_goal_expr, - ast.Field(chain=["events", "person_id"]), - ast.Constant(value=None), - ], - ) - ], - ) - else: - return None - @cached_property def pageview_count_expression(self) -> ast.Expr: if self.conversion_goal_expr: @@ -146,24 +110,6 @@ def pageview_count_expression(self) -> ast.Expr: else: return ast.Call(name="count", args=[]) - @cached_property - def conversion_count_expr(self) -> Optional[ast.Expr]: - if self.conversion_goal_expr: - return ast.Call(name="countIf", args=[self.conversion_goal_expr]) - else: - return None - - @cached_property - def event_type_expr(self) -> ast.Expr: - pageview_expr = ast.CompareOperation( - op=ast.CompareOperationOp.Eq, left=ast.Field(chain=["event"]), right=ast.Constant(value="$pageview") - ) - - if self.conversion_goal_expr and self.conversion_goal_expr != ast.Constant(value=None): - return ast.Call(name="or", args=[pageview_expr, self.conversion_goal_expr]) - else: - return pageview_expr - @cached_property def inner_select(self) -> ast.SelectQuery: start = self.query_date_range.previous_period_date_from_as_hogql() diff --git a/posthog/management/commands/create_channel_definitions_file.py b/posthog/management/commands/create_channel_definitions_file.py index 6c254886084e45..8a62b6ec225880 100644 --- a/posthog/management/commands/create_channel_definitions_file.py +++ b/posthog/management/commands/create_channel_definitions_file.py @@ -231,6 +231,10 @@ def handle(self, *args, **options): # discord "discord.com", "discordapp.gg", + # twitter + "twitter.com", + "t.co", + "x.com", ): entries[(social_domain, EntryKind.source)] = SourceEntry("Social", "Paid Social", "Organic Social") diff --git a/posthog/management/commands/delete_persons.py b/posthog/management/commands/delete_persons.py new file mode 100644 index 00000000000000..b6e70aded78e67 --- /dev/null +++ b/posthog/management/commands/delete_persons.py @@ -0,0 +1,119 @@ +import logging + +from django.db import connection +import structlog +from 
django.core.management.base import BaseCommand + + +logger = structlog.get_logger(__name__) +logger.setLevel(logging.INFO) + + +class Command(BaseCommand): + help = "Delete a batch of persons from postgres" + + def add_arguments(self, parser): + parser.add_argument("--team-id", default=None, type=int, help="Specify a team to delete persons from.") + parser.add_argument( + "--person-ids", default=None, type=str, help="Specify a list of comma separated person ids to be deleted." + ) + parser.add_argument("--batch-size", default=1000, type=int, help="Number of rows to be deleted per batch") + parser.add_argument("--batches", default=1, type=int, help="Number of batches to run") + parser.add_argument("--live-run", action="store_true", help="Run changes, default is dry-run") + + def handle(self, *args, **options): + run(options) + + +def run(options): + live_run = options["live_run"] + team_id = options["team_id"] + person_ids = options["person_ids"].split(",") if options["person_ids"] else None + batches = options["batches"] + batch_size = options["batch_size"] + + if not team_id: + logger.error("You must specify --team-id to run this script") + return exit(1) + + # Print the plan + logger.info("Plan:") + if team_id: + logger.info(f"-> Team ID: {team_id}") + if person_ids: + logger.info(f"-> Person IDs: {person_ids}") + logger.info(f"-> Batches: {batches} of {batch_size}") + + select_query = f""" + SELECT id + FROM posthog_person + WHERE team_id=%(team_id)s {f"AND id IN ({person_ids})" if person_ids else ""} + ORDER BY id ASC + LIMIT %(limit)s + """ + + delete_query_person_distinct_ids = f""" + WITH to_delete AS ({select_query}) + DELETE FROM posthog_persondistinctid + WHERE team_id = %(team_id)s AND person_id IN (SELECT id FROM to_delete); + """ + + delete_query_person_override = f""" + WITH to_delete AS ({select_query}) + DELETE FROM posthog_personoverride + WHERE team_id = %(team_id)s AND (old_person_id IN (SELECT id FROM to_delete) OR override_person_id IN (SELECT id FROM to_delete)); + """ + + delete_query_person = f""" + WITH to_delete AS ({select_query}) + DELETE FROM posthog_person + WHERE team_id = %(team_id)s AND id IN (SELECT id FROM to_delete); + """ + + with connection.cursor() as cursor: + prepared_person_distinct_ids_query = cursor.mogrify( + delete_query_person_distinct_ids, {"team_id": team_id, "limit": batch_size, "person_ids": person_ids} + ) + prepared_person_override_query = cursor.mogrify( + delete_query_person_override, {"team_id": team_id, "limit": batch_size, "person_ids": person_ids} + ) + prepared_person_query = cursor.mogrify( + delete_query_person, {"team_id": team_id, "limit": batch_size, "person_ids": person_ids} + ) + + logger.info(f"Delete query to run:") + logger.info(prepared_person_distinct_ids_query) + logger.info(prepared_person_override_query) + logger.info(prepared_person_query) + + if not live_run: + logger.info(f"Dry run. 
Set --live-run to actually delete.") + return exit(0) + + confirm = input("Type 'delete' to confirm: ") + + if confirm != "delete": + logger.info("Aborting") + return exit(0) + + logger.info(f"Executing delete query...") + + for i in range(0, batches): + logger.info(f"Deleting batch {i + 1} of {batches} ({batch_size} rows)") + with connection.cursor() as cursor: + cursor.execute( + delete_query_person_distinct_ids, {"team_id": team_id, "limit": batch_size, "person_ids": person_ids} + ) + logger.info(f"Deleted {cursor.rowcount} distinct_ids") + cursor.execute( + delete_query_person_override, {"team_id": team_id, "limit": batch_size, "person_ids": person_ids} + ) + logger.info(f"Deleted {cursor.rowcount} person overrides") + cursor.execute(delete_query_person, {"team_id": team_id, "limit": batch_size, "person_ids": person_ids}) + logger.info(f"Deleted {cursor.rowcount} persons") + + if cursor.rowcount < batch_size: + logger.info(f"Exiting early as we received less than {batch_size} rows") + break + + logger.info("Done") diff --git a/posthog/management/commands/migrate_action_webhooks.py b/posthog/management/commands/migrate_action_webhooks.py index c35a90f61232ba..6dd92d53ad682d 100644 --- a/posthog/management/commands/migrate_action_webhooks.py +++ b/posthog/management/commands/migrate_action_webhooks.py @@ -140,7 +140,7 @@ def convert_to_hog_function(action: Action, inert=False) -> Optional[HogFunction inputs_schema=webhook_template.inputs_schema, template_id=webhook_template.id, hog=hog_code, - bytecode=compile_hog(hog_code), + bytecode=compile_hog(hog_code, "destination"), filters=compile_filters_bytecode( {"actions": [{"id": f"{action.id}", "type": "actions", "name": action.name, "order": 0}]}, action.team ), diff --git a/posthog/management/commands/sync_remote_configs.py b/posthog/management/commands/sync_remote_configs.py new file mode 100644 index 00000000000000..5b8944f370d9a6 --- /dev/null +++ b/posthog/management/commands/sync_remote_configs.py @@ -0,0 +1,12 @@ +from django.core.management.base import BaseCommand + +from posthog.tasks.remote_config import sync_all_remote_configs + + +class Command(BaseCommand): + help = "Sync all RemoteConfigs" + + def handle(self, *args, **options): + print("Syncing RemoteConfigs for all teams...") # noqa: T201 + sync_all_remote_configs() + print("All syncs scheduled") # noqa: T201 diff --git a/posthog/management/commands/warehouse_last_incremental_value_sync.py b/posthog/management/commands/warehouse_last_incremental_value_sync.py new file mode 100644 index 00000000000000..0c610d92164de9 --- /dev/null +++ b/posthog/management/commands/warehouse_last_incremental_value_sync.py @@ -0,0 +1,143 @@ +from typing import Any, Optional +from django.core.management.base import BaseCommand +from django.conf import settings + +import dlt +import dlt.common +import dlt.common.pipeline +import dlt.common.configuration.resolve + +from dlt.common.configuration.specs.base_configuration import ( + is_secret_hint, +) +from dlt.common.configuration.providers.provider import ConfigProvider +from dlt.common.configuration.exceptions import ( + LookupTrace, + ValueNotSecretException, +) + +from posthog.warehouse.models.external_data_schema import ExternalDataSchema + + +# Redefine a broken DLT func +def _resolve_single_provider_value( + provider: ConfigProvider, + key: str, + hint: type[Any], + pipeline_name: str | None = None, # type: ignore + config_section: str | None = None, # type: ignore + explicit_sections: tuple[str, ...] 
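The new delete_persons command deletes in fixed-size batches and bails out as soon as a batch comes back short, which keeps each DELETE bounded and makes the run resumable. A condensed sketch of the loop shape (assuming a DB-API cursor such as psycopg2's; passing the id list as an array through = ANY(...) is an illustrative alternative to splicing the Python list into the SQL string):

```python
from typing import Optional

SELECT_BATCH = """
    SELECT id FROM posthog_person
    WHERE team_id = %(team_id)s
      AND (%(person_ids)s::bigint[] IS NULL OR id = ANY(%(person_ids)s))
    ORDER BY id ASC
    LIMIT %(limit)s
"""

DELETE_BATCH = f"""
    WITH to_delete AS ({SELECT_BATCH})
    DELETE FROM posthog_person
    WHERE team_id = %(team_id)s AND id IN (SELECT id FROM to_delete)
"""

def delete_persons_in_batches(cursor, team_id: int, person_ids: Optional[list[int]], batch_size: int, batches: int) -> None:
    params = {"team_id": team_id, "person_ids": person_ids, "limit": batch_size}
    for i in range(batches):
        cursor.execute(DELETE_BATCH, params)
        print(f"Batch {i + 1}/{batches}: deleted {cursor.rowcount} persons")
        if cursor.rowcount < batch_size:
            break  # a short batch means nothing is left to delete; safe to stop early
```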
= (), + embedded_sections: tuple[str, ...] = (), +) -> tuple[Optional[Any], list[LookupTrace]]: + traces: list[LookupTrace] = [] + + if provider.supports_sections: + ns = list(explicit_sections if explicit_sections is not None else ()) # This was the broken line + # always extend with embedded sections + ns.extend(embedded_sections) + else: + # if provider does not support sections and pipeline name is set then ignore it + if pipeline_name: + return None, traces + else: + # pass empty sections + ns = [] + + value = None + while True: + if config_section and provider.supports_sections: + full_ns = ns.copy() + # config section, is always present and innermost + if config_section: + full_ns.append(config_section) + else: + full_ns = ns + value, ns_key = provider.get_value(key, hint, pipeline_name, *full_ns) + # if secret is obtained from non secret provider, we must fail + cant_hold_it: bool = not provider.supports_secrets and is_secret_hint(hint) + if value is not None and cant_hold_it: + raise ValueNotSecretException(provider.name, ns_key) + + # create trace, ignore providers that cant_hold_it + if not cant_hold_it: + traces.append(LookupTrace(provider.name, full_ns, ns_key, value)) + + if value is not None: + # value found, ignore further sections + break + if len(ns) == 0: + # sections exhausted + break + # pop optional sections for less precise lookup + ns.pop() + + return value, traces + + +dlt.common.configuration.resolve.resolve_single_provider_value = _resolve_single_provider_value + + +class Command(BaseCommand): + help = "Sync Data Warehouse last incremental values from DLT S3" + + def handle(self, *args, **options): + destination = dlt.destinations.filesystem( + credentials={ + "aws_access_key_id": settings.AIRBYTE_BUCKET_KEY, + "aws_secret_access_key": settings.AIRBYTE_BUCKET_SECRET, + "region_name": settings.AIRBYTE_BUCKET_REGION, + "AWS_DEFAULT_REGION": settings.AIRBYTE_BUCKET_REGION, + "AWS_S3_ALLOW_UNSAFE_RENAME": "true", + }, + bucket_url=str(settings.BUCKET_URL), + ) + + schemas = ( + ExternalDataSchema.objects.filter(sync_type="incremental", deleted=False) + .exclude(sync_type_config__has_key="incremental_field_last_value") + .select_related("source") + ) + + total_schemas = len(schemas) + print(f"Total schemas: {total_schemas}") # noqa: T201 + + for index, schema in enumerate(schemas): + print(f"Updating schema {index + 1}/{total_schemas} - Schema.ID: {schema.pk}") # noqa: T201 + + dataset_name = schema.folder_path() + team_id = schema.team_id + schema_id = str(schema.id) + job_type = schema.source.source_type + pipeline_name = f"{job_type}_pipeline_{team_id}_run_{schema_id}" + + pipeline = dlt.pipeline( + pipeline_name=pipeline_name, + destination=destination, + dataset_name=dataset_name, + ) + + pipeline.sync_destination() + + try: + sources = pipeline.state["sources"] # type: ignore + resource = sources[next(iter(sources.keys()))] + resources = resource["resources"] + tables = resources[next(iter(resources.keys()))] + table = tables[next(iter(tables.keys()))] + incremental = table[next(iter(table.keys()))] + last_incremental_value = incremental.get("last_value") + except Exception as e: + print(f"Can't get last_incremental_value for schema: {schema.pk}. ERROR: {e}") # noqa: T201 + pipeline.drop() + continue + + try: + schema.update_incremental_field_last_value(last_incremental_value) + except Exception as e: + print( # noqa: T201 + f"Can't update_incremental_field_last_value for schema: {schema.pk}. With last_incremental_value={last_incremental_value}.
ERROR: {e}" + ) + pipeline.drop() + continue + + pipeline.drop() diff --git a/posthog/middleware.py b/posthog/middleware.py index ee132dc78d0af5..af7b6768d9c540 100644 --- a/posthog/middleware.py +++ b/posthog/middleware.py @@ -677,7 +677,19 @@ def get_impersonated_session_expires_at(request: HttpRequest) -> Optional[dateti init_time = get_or_set_session_cookie_created_at(request=request) - return datetime.fromtimestamp(init_time) + timedelta(seconds=settings.IMPERSONATION_TIMEOUT_SECONDS) + last_activity_time = request.session.get(settings.IMPERSONATION_COOKIE_LAST_ACTIVITY_KEY, init_time) + + # If the last activity time is less than the idle timeout, we extend the session + if time.time() - last_activity_time < settings.IMPERSONATION_IDLE_TIMEOUT_SECONDS: + last_activity_time = request.session[settings.IMPERSONATION_COOKIE_LAST_ACTIVITY_KEY] = time.time() + request.session.modified = True + + idle_expiry_time = datetime.fromtimestamp(last_activity_time) + timedelta( + seconds=settings.IMPERSONATION_IDLE_TIMEOUT_SECONDS + ) + total_expiry_time = datetime.fromtimestamp(init_time) + timedelta(seconds=settings.IMPERSONATION_TIMEOUT_SECONDS) + + return min(idle_expiry_time, total_expiry_time) class AutoLogoutImpersonateMiddleware: diff --git a/posthog/migrations/0529_hog_function_mappings.py b/posthog/migrations/0529_hog_function_mappings.py new file mode 100644 index 00000000000000..a15735cc892907 --- /dev/null +++ b/posthog/migrations/0529_hog_function_mappings.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2024-12-10 11:36 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0528_project_field_in_taxonomy"), + ] + + operations = [ + migrations.AddField( + model_name="hogfunction", + name="mappings", + field=models.JSONField(blank=True, null=True), + ), + ] diff --git a/posthog/migrations/0530_convert_dashboard_templates_to_queries.py b/posthog/migrations/0530_convert_dashboard_templates_to_queries.py new file mode 100644 index 00000000000000..393032d32a1cac --- /dev/null +++ b/posthog/migrations/0530_convert_dashboard_templates_to_queries.py @@ -0,0 +1,31 @@ +# Generated by Django 4.2.15 on 2024-11-04 11:24 + +from django.db import migrations + +from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query +from posthog.schema import InsightVizNode + + +def update_filters_to_queries(apps, schema_editor): + DashboardTemplate = apps.get_model("posthog", "DashboardTemplate") + + for template in DashboardTemplate.objects.all(): + for tile in template.tiles: + if "filters" in tile: + source = filter_to_query(tile["filters"], allow_variables=True) + query = InsightVizNode(source=source) + tile["query"] = query.model_dump(exclude_none=True) + del tile["filters"] + template.save() + + +def revert_queries_to_filters(apps, schema_editor): + pass + + +class Migration(migrations.Migration): + dependencies = [("posthog", "0529_hog_function_mappings")] + + operations = [ + migrations.RunPython(update_filters_to_queries, revert_queries_to_filters), + ] diff --git a/posthog/migrations/0531_alter_hogfunction_type.py b/posthog/migrations/0531_alter_hogfunction_type.py new file mode 100644 index 00000000000000..b3160bc5fbc2de --- /dev/null +++ b/posthog/migrations/0531_alter_hogfunction_type.py @@ -0,0 +1,33 @@ +# Generated by Django 4.2.15 on 2024-12-13 13:22 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", 
"0530_convert_dashboard_templates_to_queries"), + ] + + operations = [ + migrations.AlterField( + model_name="hogfunction", + name="type", + field=models.CharField( + blank=True, + choices=[ + ("destination", "Destination"), + ("site_destination", "Site Destination"), + ("site_app", "Site App"), + ("transformation", "Transformation"), + ("email", "Email"), + ("sms", "Sms"), + ("push", "Push"), + ("activity", "Activity"), + ("alert", "Alert"), + ("broadcast", "Broadcast"), + ], + max_length=24, + null=True, + ), + ), + ] diff --git a/posthog/migrations/0532_taxonomy_unique_on_project.py b/posthog/migrations/0532_taxonomy_unique_on_project.py new file mode 100644 index 00000000000000..2ba5b98d2652c9 --- /dev/null +++ b/posthog/migrations/0532_taxonomy_unique_on_project.py @@ -0,0 +1,119 @@ +# Generated by Django 4.2.15 on 2024-12-09 15:51 + +from django.db import migrations +from django.db import models +import django.db.models.functions.comparison +import posthog.models.utils +from django.contrib.postgres.operations import AddIndexConcurrently, RemoveIndexConcurrently + + +class Migration(migrations.Migration): + atomic = False # Added to support concurrent index creation + dependencies = [("posthog", "0531_alter_hogfunction_type")] + + operations = [ + # First clean up rows that would fail the project-based unique constraints we're adding + migrations.RunSQL( + sql=""" + DELETE FROM posthog_propertydefinition + WHERE team_id IN ( + SELECT id FROM posthog_team WHERE id != project_id + );""", + reverse_sql=migrations.RunSQL.noop, + elidable=True, + ), + migrations.RunSQL( + sql=""" + DELETE FROM posthog_eventdefinition + WHERE team_id IN ( + SELECT id FROM posthog_team WHERE id != project_id + );""", + reverse_sql=migrations.RunSQL.noop, + elidable=True, + ), + migrations.RunSQL( + sql=""" + DELETE FROM posthog_eventproperty + WHERE team_id IN ( + SELECT id FROM posthog_team WHERE id != project_id + );""", + reverse_sql=migrations.RunSQL.noop, + elidable=True, + ), + # Remove misguided `project_id`-only indexes from the previous migration + RemoveIndexConcurrently( + model_name="eventproperty", + name="posthog_eve_proj_id_22de03_idx", + ), + RemoveIndexConcurrently( + model_name="eventproperty", + name="posthog_eve_proj_id_26dbfb_idx", + ), + RemoveIndexConcurrently( + model_name="propertydefinition", + name="index_property_def_query_proj", + ), + RemoveIndexConcurrently( + model_name="propertydefinition", + name="posthog_pro_project_3583d2_idx", + ), + # Add new useful indexes using `coalesce(project_id, team_id)` + AddIndexConcurrently( + model_name="eventproperty", + index=models.Index( + django.db.models.functions.comparison.Coalesce(models.F("project_id"), models.F("team_id")), + models.F("event"), + name="posthog_eve_proj_id_22de03_idx", + ), + ), + AddIndexConcurrently( + model_name="eventproperty", + index=models.Index( + django.db.models.functions.comparison.Coalesce(models.F("project_id"), models.F("team_id")), + models.F("property"), + name="posthog_eve_proj_id_26dbfb_idx", + ), + ), + AddIndexConcurrently( + model_name="propertydefinition", + index=models.Index( + django.db.models.functions.comparison.Coalesce(models.F("project_id"), models.F("team_id")), + models.F("type"), + django.db.models.functions.comparison.Coalesce(models.F("group_type_index"), -1), + models.OrderBy(models.F("query_usage_30_day"), descending=True, nulls_last=True), + models.OrderBy(models.F("name")), + name="index_property_def_query_proj", + ), + ), + AddIndexConcurrently( + 
model_name="propertydefinition", + index=models.Index( + django.db.models.functions.comparison.Coalesce(models.F("project_id"), models.F("team_id")), + models.F("type"), + models.F("is_numerical"), + name="posthog_pro_project_3583d2_idx", + ), + ), + migrations.AddConstraint( + model_name="eventdefinition", + constraint=posthog.models.utils.UniqueConstraintByExpression( + concurrently=True, expression="(coalesce(project_id, team_id), name)", name="event_definition_proj_uniq" + ), + ), + migrations.AddConstraint( + model_name="eventproperty", + constraint=posthog.models.utils.UniqueConstraintByExpression( + concurrently=True, + expression="(coalesce(project_id, team_id), event, property)", + name="posthog_event_property_unique_proj_event_property", + ), + ), + migrations.AddConstraint( + model_name="propertydefinition", + constraint=posthog.models.utils.UniqueConstraintByExpression( + concurrently=True, + expression="(coalesce(project_id, team_id), name, type, coalesce(group_type_index, -1))", + name="posthog_propdef_proj_uniq", + ), + ), + ] diff --git a/posthog/migrations/max_migration.txt b/posthog/migrations/max_migration.txt index 647b659f0832e3..01fc03d62a8a09 100644 --- a/posthog/migrations/max_migration.txt +++ b/posthog/migrations/max_migration.txt @@ -1 +1 @@ -0528_project_field_in_taxonomy +0532_taxonomy_unique_on_project diff --git a/posthog/models/channel_type/channel_definitions.json b/posthog/models/channel_type/channel_definitions.json index 9c0e31e46743b4..3558859cd81af8 100644 --- a/posthog/models/channel_type/channel_definitions.json +++ b/posthog/models/channel_type/channel_definitions.json @@ -47,6 +47,7 @@ ["com.alibaba.intl.android.apps.poseidon", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], ["com.alibaba.intl.android.apps.twa", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], ["com.alibaba.sourcing", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], + ["com.alibaba.sourcing.ent", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], ["m.alibaba.com", "source", "Shopping", "Paid Shopping", "Organic Shopping", false], ["message.alibaba.com", "source", "Shopping", "Paid Shopping", "Organic Shopping", false], ["offer.alibaba.com", "source", "Shopping", "Paid Shopping", "Organic Shopping", false], @@ -62,6 +63,8 @@ ["amazon.co.uk", "source", "Shopping", "Paid Shopping", "Organic Shopping", false], ["amazon.com", "source", "Shopping", "Paid Shopping", "Organic Shopping", false], ["com.amazon.avod.thirdpartyclient", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], + ["com.amazon.kindle", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], + ["com.amazon.kindlefs", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], ["com.amazon.mp3", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], ["com.amazon.mshop.android.business.shopping", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], ["com.amazon.mshop.android.shopping", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], @@ -100,6 +103,11 @@ ["askubuntu", "source", "Social", "Paid Social", "Organic Social", false], ["askubuntu.com", "source", "Social", "Paid Social", "Organic Social", false], ["asmallworld.com", "source", "Social", "Paid Social", "Organic Social", false], + ["com.asmallworld.android", "source", "Social", "Paid Social", "Organic Social", true], + ["com.asmallworld.android_europa", "source", "Social", "Paid Social", "Organic Social", true], + 
["com.asmallworld.android_ganymede", "source", "Social", "Paid Social", "Organic Social", true], + ["com.asmallworld.android_io", "source", "Social", "Paid Social", "Organic Social", true], + ["net.asmallworld.ios", "source", "Social", "Paid Social", "Organic Social", true], ["com.atebits.tweetie2", "source", "Social", "Paid Social", "Organic Social", true], ["athlinks", "source", "Social", "Paid Social", "Organic Social", false], ["athlinks.com", "source", "Social", "Paid Social", "Organic Social", false], @@ -130,6 +138,7 @@ ["bebo.com", "source", "Social", "Paid Social", "Organic Social", false], ["beforeitsnews", "source", "Social", "Paid Social", "Organic Social", false], ["beforeitsnews.com", "source", "Social", "Paid Social", "Organic Social", false], + ["ru.beru.android", "source", "Search", "Paid Search", "Organic Search", true], ["bharatstudent", "source", "Social", "Paid Social", "Organic Social", false], ["bharatstudent.com", "source", "Social", "Paid Social", "Organic Social", false], ["biglobe", "source", "Search", "Paid Search", "Organic Search", false], @@ -314,6 +323,8 @@ ["epicurious.com", "source", "Social", "Paid Social", "Organic Social", false], ["etsy", "source", "Shopping", "Paid Shopping", "Organic Shopping", false], ["com.etsy.android", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], + ["com.etsy.butter.sell_on_etsy", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], + ["com.etsy.butter.sell_on_etsy.enterprise", "source", "Shopping", "Paid Shopping", "Organic Shopping", true], ["etsy.com", "source", "Shopping", "Paid Shopping", "Organic Shopping", false], ["everforo.com", "source", "Social", "Paid Social", "Organic Social", false], ["exalead.com", "source", "Search", "Paid Search", "Organic Search", false], @@ -341,6 +352,7 @@ ["com.facebook.pages.app", "source", "Social", "Paid Social", "Organic Social", true], ["com.facebook.scenes", "source", "Social", "Paid Social", "Organic Social", true], ["com.facebook.shoerack", "source", "Social", "Paid Social", "Organic Social", true], + ["com.facebook.stella", "source", "Social", "Paid Social", "Organic Social", true], ["com.facebook.stellaapp", "source", "Social", "Paid Social", "Organic Social", true], ["com.facebook.stellaapp.master", "source", "Social", "Paid Social", "Organic Social", true], ["com.facebook.wakizashi", "source", "Social", "Paid Social", "Organic Social", true], @@ -925,10 +937,12 @@ ["ning.com", "source", "Social", "Paid Social", "Organic Social", false], ["com.oath.yahooapp", "source", "Social", "Paid Social", "Organic Social", true], ["com.oculus.facebook", "source", "Social", "Paid Social", "Organic Social", true], + ["com.oculus.igvr", "source", "Social", "Paid Social", "Organic Social", true], ["odnoklassniki.ru", "source", "Social", "Paid Social", "Organic Social", false], ["ru.odnoklassniki.iphone", "source", "Social", "Paid Social", "Organic Social", true], ["odnoklassniki.ua", "source", "Social", "Paid Social", "Organic Social", false], ["ru.ok.android", "source", "Social", "Paid Social", "Organic Social", true], + ["ru.ok.dating", "source", "Social", "Paid Social", "Organic Social", true], ["ru.ok.live", "source", "Social", "Paid Social", "Organic Social", true], ["okwave.jp", "source", "Social", "Paid Social", "Organic Social", false], ["onesearch.com", "source", "Search", "Paid Search", "Organic Search", false], @@ -1350,6 +1364,7 @@ ["org.wordpress.android", "source", "Social", "Paid Social", "Organic Social", true], ["wordpress.org", "source", 
"Social", "Paid Social", "Organic Social", false], ["forums.wpcentral.com", "source", "Social", "Paid Social", "Organic Social", false], + ["x.com", "source", "Social", "Paid Social", "Organic Social", false], ["xanga", "source", "Social", "Paid Social", "Organic Social", false], ["xanga.com", "source", "Social", "Paid Social", "Organic Social", false], ["xing", "source", "Social", "Paid Social", "Organic Social", false], @@ -1457,6 +1472,7 @@ ["ru.yandex.mobile.yamb", "source", "Search", "Paid Search", "Organic Search", true], ["ru.yandex.mobile.zen", "source", "Search", "Paid Search", "Organic Search", true], ["ru.yandex.mobile.zen-enterprise", "source", "Search", "Paid Search", "Organic Search", true], + ["ru.yandex.music", "source", "Search", "Paid Search", "Organic Search", true], ["ru.yandex.rhythm", "source", "Search", "Paid Search", "Organic Search", true], ["ru.yandex.searchplugin", "source", "Search", "Paid Search", "Organic Search", true], ["ru.yandex.taxi", "source", "Search", "Paid Search", "Organic Search", true], diff --git a/posthog/models/dashboard_templates.py b/posthog/models/dashboard_templates.py index 4826be8c9acfa7..e8fb368a492ca0 100644 --- a/posthog/models/dashboard_templates.py +++ b/posthog/models/dashboard_templates.py @@ -69,12 +69,29 @@ def original_template() -> "DashboardTemplate": "name": "Daily active users (DAUs)", "type": "INSIGHT", "color": "blue", - "filters": { - "events": [{"id": "$pageview", "math": "dau", "type": "events"}], - "display": "ActionsLineGraph", - "insight": "TRENDS", - "interval": "day", - "date_from": "-30d", + "query": { + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [ + {"kind": "EventsNode", "math": "dau", "name": "$pageview", "event": "$pageview"} + ], + "interval": "day", + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "properties": [], + "trendsFilter": { + "display": "ActionsLineGraph", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown_type": "event"}, + "filterTestAccounts": False, + }, }, "layouts": { "sm": {"h": 5, "w": 6, "x": 0, "y": 0, "minH": 5, "minW": 3}, @@ -86,18 +103,29 @@ def original_template() -> "DashboardTemplate": "name": "Weekly active users (WAUs)", "type": "INSIGHT", "color": "green", - "filters": { - "events": [ - { - "id": "$pageview", - "math": "dau", - "type": "events", - } - ], - "display": "ActionsLineGraph", - "insight": "TRENDS", - "interval": "week", - "date_from": "-90d", + "query": { + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [ + {"kind": "EventsNode", "math": "dau", "name": "$pageview", "event": "$pageview"} + ], + "interval": "week", + "dateRange": {"date_from": "-90d", "explicitDate": False}, + "properties": [], + "trendsFilter": { + "display": "ActionsLineGraph", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown_type": "event"}, + "filterTestAccounts": False, + }, }, "layouts": { "sm": {"h": 5, "w": 6, "x": 6, "y": 0, "minH": 5, "minW": 3}, @@ -109,12 +137,21 @@ def original_template() -> "DashboardTemplate": "name": "Retention", "type": "INSIGHT", "color": "blue", - "filters": { - "period": "Week", - 
"insight": "RETENTION", - "target_entity": {"id": "$pageview", "type": "events"}, - "retention_type": "retention_first_time", - "returning_entity": {"id": "$pageview", "type": "events"}, + "query": { + "kind": "InsightVizNode", + "source": { + "kind": "RetentionQuery", + "dateRange": {"date_from": "-7d", "explicitDate": False}, + "properties": [], + "retentionFilter": { + "period": "Week", + "targetEntity": {"id": "$pageview", "type": "events"}, + "retentionType": "retention_first_time", + "totalIntervals": 11, + "returningEntity": {"id": "$pageview", "type": "events"}, + }, + "filterTestAccounts": False, + }, }, "layouts": { "sm": {"h": 5, "w": 6, "x": 6, "y": 5, "minH": 5, "minW": 3}, @@ -126,13 +163,17 @@ def original_template() -> "DashboardTemplate": "name": "Growth accounting", "type": "INSIGHT", "color": "purple", - "filters": { - "events": [{"id": "$pageview", "type": "events"}], - "insight": "LIFECYCLE", - "interval": "week", - "shown_as": "Lifecycle", - "date_from": "-30d", - "entity_type": "events", + "query": { + "kind": "InsightVizNode", + "source": { + "kind": "LifecycleQuery", + "series": [{"kind": "EventsNode", "name": "$pageview", "event": "$pageview"}], + "interval": "week", + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "properties": [], + "lifecycleFilter": {"showLegend": False}, + "filterTestAccounts": False, + }, }, "layouts": { "sm": {"h": 5, "w": 6, "x": 0, "y": 5, "minH": 5, "minW": 3}, @@ -144,14 +185,29 @@ def original_template() -> "DashboardTemplate": "name": "Referring domain (last 14 days)", "type": "INSIGHT", "color": "black", - "filters": { - "events": [{"id": "$pageview", "math": "dau", "type": "events"}], - "display": "ActionsBarValue", - "insight": "TRENDS", - "interval": "day", - "breakdown": "$referring_domain", - "date_from": "-14d", - "breakdown_type": "event", + "query": { + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [ + {"kind": "EventsNode", "math": "dau", "name": "$pageview", "event": "$pageview"} + ], + "interval": "day", + "dateRange": {"date_from": "-14d", "explicitDate": False}, + "properties": [], + "trendsFilter": { + "display": "ActionsBarValue", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown": "$referring_domain", "breakdown_type": "event"}, + "filterTestAccounts": False, + }, }, "layouts": { "sm": {"h": 5, "w": 6, "x": 0, "y": 10, "minH": 5, "minW": 3}, @@ -163,35 +219,46 @@ def original_template() -> "DashboardTemplate": "name": "Pageview funnel, by browser", "type": "INSIGHT", "color": "green", - "filters": { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "custom_name": "First page view", + "query": { + "kind": "InsightVizNode", + "source": { + "kind": "FunnelsQuery", + "series": [ + { + "kind": "EventsNode", + "name": "$pageview", + "event": "$pageview", + "custom_name": "First page view", + }, + { + "kind": "EventsNode", + "name": "$pageview", + "event": "$pageview", + "custom_name": "Second page view", + }, + { + "kind": "EventsNode", + "name": "$pageview", + "event": "$pageview", + "custom_name": "Third page view", + }, + ], + "interval": "day", + "dateRange": {"date_from": "-7d", "explicitDate": False}, + "properties": [], + "funnelsFilter": { + "layout": "horizontal", + "exclusions": [], + "funnelVizType": "steps", + "funnelOrderType": "ordered", + 
"funnelStepReference": "total", + "funnelWindowInterval": 14, + "breakdownAttributionType": "first_touch", + "funnelWindowIntervalUnit": "day", }, - { - "id": "$pageview", - "type": "events", - "order": 1, - "custom_name": "Second page view", - }, - { - "id": "$pageview", - "type": "events", - "order": 2, - "custom_name": "Third page view", - }, - ], - "layout": "horizontal", - "display": "FunnelViz", - "insight": "FUNNELS", - "interval": "day", - "exclusions": [], - "breakdown_type": "event", - "breakdown": "$browser", - "funnel_viz_type": "steps", + "breakdownFilter": {"breakdown": "$browser", "breakdown_type": "event"}, + "filterTestAccounts": False, + }, }, "layouts": { "sm": {"h": 5, "w": 6, "x": 6, "y": 10, "minH": 5, "minW": 3}, @@ -214,12 +281,29 @@ def feature_flag_template(feature_flag_key: str) -> "DashboardTemplate": "name": "Daily active users (DAUs)", "type": "INSIGHT", "color": "blue", - "filters": { - "events": [{"id": "$pageview", "math": "dau", "type": "events"}], - "display": "ActionsLineGraph", - "insight": "TRENDS", - "interval": "day", - "date_from": "-30d", + "query": { + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [ + {"kind": "EventsNode", "math": "dau", "name": "$pageview", "event": "$pageview"} + ], + "interval": "day", + "dateRange": {"date_from": "-30d", "explicitDate": False}, + "properties": [], + "trendsFilter": { + "display": "ActionsLineGraph", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown_type": "event"}, + "filterTestAccounts": False, + }, }, "layouts": { "sm": {"h": 5, "w": 6, "x": 0, "y": 0, "minH": 5, "minW": 3}, @@ -231,18 +315,29 @@ def feature_flag_template(feature_flag_key: str) -> "DashboardTemplate": "name": "Weekly active users (WAUs)", "type": "INSIGHT", "color": "green", - "filters": { - "events": [ - { - "id": "$pageview", - "math": "dau", - "type": "events", - } - ], - "display": "ActionsLineGraph", - "insight": "TRENDS", - "interval": "week", - "date_from": "-90d", + "query": { + "kind": "InsightVizNode", + "source": { + "kind": "TrendsQuery", + "series": [ + {"kind": "EventsNode", "math": "dau", "name": "$pageview", "event": "$pageview"} + ], + "interval": "week", + "dateRange": {"date_from": "-90d", "explicitDate": False}, + "properties": [], + "trendsFilter": { + "display": "ActionsLineGraph", + "showLegend": False, + "yAxisScaleType": "linear", + "showValuesOnSeries": False, + "smoothingIntervals": 1, + "showPercentStackView": False, + "aggregationAxisFormat": "numeric", + "showAlertThresholdLines": False, + }, + "breakdownFilter": {"breakdown_type": "event"}, + "filterTestAccounts": False, + }, }, "layouts": { "sm": {"h": 5, "w": 6, "x": 6, "y": 0, "minH": 5, "minW": 3}, diff --git a/posthog/models/event_definition.py b/posthog/models/event_definition.py index 47a14035652ed0..9d1d574631858c 100644 --- a/posthog/models/event_definition.py +++ b/posthog/models/event_definition.py @@ -3,7 +3,7 @@ from django.utils import timezone from posthog.models.team import Team -from posthog.models.utils import UUIDModel +from posthog.models.utils import UUIDModel, UniqueConstraintByExpression class EventDefinition(UUIDModel): @@ -27,7 +27,6 @@ class EventDefinition(UUIDModel): volume_30_day = models.IntegerField(default=None, null=True) class Meta: - unique_together = ("team", "name") indexes = [ # Index on 
project_id foreign key models.Index(fields=["project"], name="posthog_eve_proj_id_f93fcbb0"), @@ -37,6 +36,14 @@ class Meta: opclasses=["gin_trgm_ops"], ), # To speed up DB-based fuzzy searching ] + unique_together = ("team", "name") + constraints = [ + UniqueConstraintByExpression( + concurrently=True, + name="event_definition_proj_uniq", + expression="(coalesce(project_id, team_id), name)", + ) + ] def __str__(self) -> str: return f"{self.name} / {self.team.name}" diff --git a/posthog/models/event_property.py b/posthog/models/event_property.py index 61f5d27baace6d..4dc05a98e57010 100644 --- a/posthog/models/event_property.py +++ b/posthog/models/event_property.py @@ -1,7 +1,9 @@ from django.db import models from posthog.models.team import Team -from posthog.models.utils import sane_repr +from posthog.models.utils import UniqueConstraintByExpression, sane_repr +from django.db.models.expressions import F +from django.db.models.functions import Coalesce class EventProperty(models.Model): @@ -15,15 +17,20 @@ class Meta: models.UniqueConstraint( fields=["team", "event", "property"], name="posthog_event_property_unique_team_event_property", - ) + ), + UniqueConstraintByExpression( + concurrently=True, + name="posthog_event_property_unique_proj_event_property", + expression="(coalesce(project_id, team_id), event, property)", + ), ] indexes = [ # Index on project_id foreign key models.Index(fields=["project"], name="posthog_eve_proj_id_dd2337d2"), models.Index(fields=["team", "event"]), - models.Index(fields=["project", "event"], name="posthog_eve_proj_id_22de03_idx"), + models.Index(Coalesce(F("project_id"), F("team_id")), F("event"), name="posthog_eve_proj_id_22de03_idx"), models.Index(fields=["team", "property"]), - models.Index(fields=["project", "property"], name="posthog_eve_proj_id_26dbfb_idx"), + models.Index(Coalesce(F("project_id"), F("team_id")), F("property"), name="posthog_eve_proj_id_26dbfb_idx"), ] __repr__ = sane_repr("event", "property", "team_id") diff --git a/posthog/models/feature_flag/flag_matching.py b/posthog/models/feature_flag/flag_matching.py index 573d766e654981..05182a7f1fedeb 100644 --- a/posthog/models/feature_flag/flag_matching.py +++ b/posthog/models/feature_flag/flag_matching.py @@ -829,6 +829,7 @@ def get_all_feature_flags( ) all_feature_flags = get_feature_flags_for_team_in_cache(team_id) cache_hit = True + if all_feature_flags is None: cache_hit = False all_feature_flags = set_feature_flags_for_team_in_cache(team_id) diff --git a/posthog/models/feature_flag/flag_status.py b/posthog/models/feature_flag/flag_status.py index ab236bd9fcee93..fa7ad529293043 100644 --- a/posthog/models/feature_flag/flag_status.py +++ b/posthog/models/feature_flag/flag_status.py @@ -85,7 +85,7 @@ def is_flag_fully_rolled_out(self, flag: FeatureFlag) -> tuple[bool, FeatureFlag ) if multivariate and is_multivariate_flag_fully_rolled_out: return True, f'This flag will always use the variant "{fully_rolled_out_variant_name}"' - elif self.is_boolean_flag_fully_rolled_out(flag): + elif not multivariate and self.is_boolean_flag_fully_rolled_out(flag): return True, 'This boolean flag will always evaluate to "true"' return False, "" diff --git a/posthog/models/hog_functions/hog_function.py b/posthog/models/hog_functions/hog_function.py index 48e3db90a9dcd1..3ddd0212f21d58 100644 --- a/posthog/models/hog_functions/hog_function.py +++ b/posthog/models/hog_functions/hog_function.py @@ -37,6 +37,7 @@ class HogFunctionType(models.TextChoices): DESTINATION = "destination" SITE_DESTINATION = 
"site_destination" SITE_APP = "site_app" + TRANSFORMATION = "transformation" EMAIL = "email" SMS = "sms" PUSH = "push" @@ -45,7 +46,7 @@ class HogFunctionType(models.TextChoices): BROADCAST = "broadcast" -TYPES_THAT_RELOAD_PLUGIN_SERVER = (HogFunctionType.DESTINATION, HogFunctionType.EMAIL) +TYPES_THAT_RELOAD_PLUGIN_SERVER = (HogFunctionType.DESTINATION, HogFunctionType.EMAIL, HogFunctionType.TRANSFORMATION) TYPES_WITH_COMPILED_FILTERS = (HogFunctionType.DESTINATION,) TYPES_WITH_TRANSPILED_FILTERS = (HogFunctionType.SITE_DESTINATION, HogFunctionType.SITE_APP) TYPES_WITH_JAVASCRIPT_SOURCE = (HogFunctionType.SITE_DESTINATION, HogFunctionType.SITE_APP) @@ -81,6 +82,7 @@ class Meta: encrypted_inputs: EncryptedJSONStringField = EncryptedJSONStringField(null=True, blank=True) filters = models.JSONField(null=True, blank=True) + mappings = models.JSONField(null=True, blank=True) masking = models.JSONField(null=True, blank=True) template_id = models.CharField(max_length=400, null=True, blank=True) diff --git a/posthog/models/integration.py b/posthog/models/integration.py index d8e49cc5d67aab..f42f70da332d90 100644 --- a/posthog/models/integration.py +++ b/posthog/models/integration.py @@ -163,7 +163,7 @@ def oauth_config_for_kind(cls, kind: str) -> OauthConfig: authorize_url="https://app.hubspot.com/oauth/authorize", token_url="https://api.hubapi.com/oauth/v1/token", token_info_url="https://api.hubapi.com/oauth/v1/access-tokens/:access_token", - token_info_config_fields=["hub_id", "hub_domain", "user", "user_id"], + token_info_config_fields=["hub_id", "hub_domain", "user", "user_id", "scopes"], client_id=settings.HUBSPOT_APP_CLIENT_ID, client_secret=settings.HUBSPOT_APP_CLIENT_SECRET, scope="tickets crm.objects.contacts.write sales-email-read crm.objects.companies.read crm.objects.deals.read crm.objects.contacts.read crm.objects.quotes.read crm.objects.companies.write", @@ -187,7 +187,7 @@ def oauth_config_for_kind(cls, kind: str) -> OauthConfig: token_url="https://oauth2.googleapis.com/token", client_id=settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY, client_secret=settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET, - scope="https://www.googleapis.com/auth/adwords email", + scope="https://www.googleapis.com/auth/adwords https://www.googleapis.com/auth/userinfo.email", id_path="sub", name_path="email", ) diff --git a/posthog/models/messaging.py b/posthog/models/messaging.py index 1f013ce09f75dd..cd929d33b99864 100644 --- a/posthog/models/messaging.py +++ b/posthog/models/messaging.py @@ -19,6 +19,22 @@ def get_or_create(self, defaults=None, **kwargs): return super().get_or_create(defaults, **kwargs) + def filter(self, *args, **kwargs): + raw_email = kwargs.pop("raw_email", None) + + if raw_email: + kwargs["email_hash"] = get_email_hash(raw_email) + + return super().filter(*args, **kwargs) + + def get(self, *args, **kwargs): + raw_email = kwargs.pop("raw_email", None) + + if raw_email: + kwargs["email_hash"] = get_email_hash(raw_email) + + return super().get(*args, **kwargs) + class MessagingRecord(UUIDModel): objects = MessagingRecordManager() diff --git a/posthog/models/property_definition.py b/posthog/models/property_definition.py index 08e6cd5d1bed1d..b4cc20797c89ab 100644 --- a/posthog/models/property_definition.py +++ b/posthog/models/property_definition.py @@ -83,7 +83,7 @@ class Meta: name="index_property_def_query", ), models.Index( - F("project_id"), + Coalesce(F("project_id"), F("team_id")), F("type"), Coalesce(F("group_type_index"), -1), F("query_usage_30_day").desc(nulls_last=True), @@ -93,7 +93,12 @@ 
class Meta: # creates an index pganalyze identified as missing # https://app.pganalyze.com/servers/i35ydkosi5cy5n7tly45vkjcqa/checks/index_advisor/missing_index/15282978 models.Index(fields=["team_id", "type", "is_numerical"]), - models.Index(fields=["project_id", "type", "is_numerical"], name="posthog_pro_project_3583d2_idx"), + models.Index( + Coalesce(F("project_id"), F("team_id")), + F("type"), + F("is_numerical"), + name="posthog_pro_project_3583d2_idx", + ), GinIndex( name="index_property_definition_name", fields=["name"], @@ -113,6 +118,11 @@ class Meta: name="posthog_propertydefinition_uniq", expression="(team_id, name, type, coalesce(group_type_index, -1))", ), + UniqueConstraintByExpression( + concurrently=True, + name="posthog_propdef_proj_uniq", + expression="(coalesce(project_id, team_id), name, type, coalesce(group_type_index, -1))", + ), ] def __str__(self) -> str: diff --git a/posthog/models/remote_config.py b/posthog/models/remote_config.py index ed13f1b2cb48f0..5ffc726d0d1c28 100644 --- a/posthog/models/remote_config.py +++ b/posthog/models/remote_config.py @@ -4,8 +4,10 @@ from collections.abc import Callable from django.conf import settings from django.db import models +from django.http import HttpRequest from django.utils import timezone from prometheus_client import Counter +import requests from sentry_sdk import capture_exception import structlog @@ -37,6 +39,12 @@ labelnames=["result"], ) +REMOTE_CONFIG_CDN_PURGE_COUNTER = Counter( + "posthog_remote_config_cdn_purge", + "Number of times the remote config CDN purge task has been run", + labelnames=["result"], +) + logger = structlog.get_logger(__name__) @@ -65,6 +73,24 @@ def cache_key_for_team_token(team_token: str, suffix: str) -> str: return f"remote_config/{team_token}/{suffix}" +def sanitize_config_for_public_cdn(config: dict, request: Optional[HttpRequest] = None) -> dict: + from posthog.api.utils import on_permitted_recording_domain + + # Remove domains from session recording + if config.get("sessionRecording"): + if "domains" in config["sessionRecording"]: + domains = config["sessionRecording"].pop("domains") + + # Empty list of domains means always permitted + if request and domains: + if not on_permitted_recording_domain(domains, request=request): + config["sessionRecording"] = False + + # Remove site apps JS + config.pop("siteAppsJS", None) + return config + + class RemoteConfig(UUIDModel): """ RemoteConfig is a helper model. There is one per team and stores a highly cacheable JSON object @@ -113,9 +139,11 @@ def build_config(self): if team.autocapture_exceptions_opt_in else False ), - "analytics": {"endpoint": settings.NEW_ANALYTICS_CAPTURE_ENDPOINT}, } + if str(team.id) not in (settings.NEW_ANALYTICS_CAPTURE_EXCLUDED_TEAM_IDS or []): + config["analytics"] = {"endpoint": settings.NEW_ANALYTICS_CAPTURE_ENDPOINT} + if str(team.id) not in (settings.ELEMENT_CHAIN_AS_STRING_EXCLUDED_TEAMS or []): config["elementsChainAsString"] = True @@ -124,10 +152,14 @@ def build_config(self): # TODO: Support the domain based check for recordings (maybe do it client side)? 
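# The session recording block below assembles a single cacheable response object. The
# "domains" key is cached along with everything else and is only stripped out per request
# by sanitize_config_for_public_cdn, so one cached config can serve any origin.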
if team.session_recording_opt_in: - sample_rate = team.session_recording_sample_rate or None + capture_console_logs = True if team.capture_console_log_opt_in else False + sample_rate = str(team.session_recording_sample_rate) if team.session_recording_sample_rate else None + if sample_rate == "1.00": sample_rate = None + minimum_duration = team.session_recording_minimum_duration_milliseconds or None + linked_flag = None linked_flag_config = team.session_recording_linked_flag or None if isinstance(linked_flag_config, dict): @@ -138,17 +170,30 @@ def build_config(self): else: linked_flag = linked_flag_key + rrweb_script_config = None + + if (settings.SESSION_REPLAY_RRWEB_SCRIPT is not None) and ( + "*" in settings.SESSION_REPLAY_RRWEB_SCRIPT_ALLOWED_TEAMS + or str(team.id) in settings.SESSION_REPLAY_RRWEB_SCRIPT_ALLOWED_TEAMS + ): + rrweb_script_config = { + "script": settings.SESSION_REPLAY_RRWEB_SCRIPT, + } + session_recording_config_response = { "endpoint": "/s/", - "consoleLogRecordingEnabled": True if team.capture_console_log_opt_in else False, + "consoleLogRecordingEnabled": capture_console_logs, "recorderVersion": "v2", - "sampleRate": str(sample_rate) if sample_rate else None, - "minimumDurationMilliseconds": team.session_recording_minimum_duration_milliseconds or None, + "sampleRate": sample_rate, + "minimumDurationMilliseconds": minimum_duration, "linkedFlag": linked_flag, "networkPayloadCapture": team.session_recording_network_payload_capture_config or None, "urlTriggers": team.session_recording_url_trigger_config, "urlBlocklist": team.session_recording_url_blocklist_config, "eventTriggers": team.session_recording_event_trigger_config, + "scriptConfig": rrweb_script_config, + # NOTE: This is cached but stripped out at the api level depending on the caller + "domains": team.recording_domains or [], } if isinstance(team.session_replay_config, dict): @@ -161,6 +206,7 @@ def build_config(self): "canvasQuality": "0.4" if record_canvas else None, } ) + config["sessionRecording"] = session_recording_config_response # MARK: Quota limiting @@ -185,11 +231,7 @@ def build_config(self): if surveys_response["survey_config"]: config["survey_config"] = surveys_response["survey_config"] - try: - default_identified_only = team.pk >= int(settings.DEFAULT_IDENTIFIED_ONLY_TEAM_ID_MIN) - except Exception: - default_identified_only = False - config["defaultIdentifiedOnly"] = bool(default_identified_only) + config["defaultIdentifiedOnly"] = True # Support old SDK versions with setting that is now the default # MARK: Site apps - we want to eventually inline the JS but that will come later site_apps = [] @@ -202,9 +244,12 @@ def build_config(self): config["siteApps"] = site_apps + # Array of JS objects to be included when building the final JS + config["siteAppsJS"] = self._build_site_apps_js() + return config - def build_js_config(self): + def _build_site_apps_js(self): # NOTE: Builds the array of site app and site function JS snippets that get inlined into the web config JS from posthog.plugins.site import get_site_apps_for_team, get_site_config_from_schema @@ -220,10 +265,11 @@ def build_js_config(self): f"\n{{\n id: '{site_app.token}',\n init: function(config) {{\n {indent_js(site_app.source, indent=4)}().inject({{ config:{json.dumps(config)}, posthog:config.posthog }});\n config.callback();\n }}\n}}" ) ) - - site_functions = HogFunction.objects.filter( - team=self.team, enabled=True, type__in=("site_destination", "site_app") - ).all() + site_functions = ( + HogFunction.objects.select_related("team") + .filter(team=self.team, 
enabled=True, type__in=("site_destination", "site_app")) + .all() + ) + + site_functions_js = [] + @@ -242,16 +288,12 @@ def build_js_config(self): logger.exception(f"Failed to build JS for site function {site_function.id}") pass - js_content = f"""(function() {{ - window._POSTHOG_CONFIG = {json.dumps(self.config)}; - window._POSTHOG_JS_APPS = [{','.join(site_apps_js + site_functions_js)}]; -}})(); - """.strip() - - return js_content + return site_apps_js + site_functions_js @classmethod - def _get_via_cache(cls, token: str, suffix: str, fn: Callable[["RemoteConfig"], dict | str]) -> Any: + def _get_via_cache( + cls, token: str, suffix: str, fn: Callable[["RemoteConfig"], dict | str], timeout: int = CACHE_TIMEOUT + ) -> Any: key = cache_key_for_team_token(token, suffix) data = cache.get(key) @@ -267,34 +309,45 @@ def _get_via_cache(cls, token: str, suffix: str, fn: Callable[["RemoteConfig"], try: remote_config = cls.objects.select_related("team").get(team__api_token=token) except cls.DoesNotExist: - cache.set(key, "404", timeout=CACHE_TIMEOUT) + cache.set(key, "404", timeout=timeout) REMOTE_CONFIG_CACHE_COUNTER.labels(result="miss_but_missing").inc() raise data = fn(remote_config) - cache.set(key, data, timeout=CACHE_TIMEOUT) + cache.set(key, data, timeout=timeout) return data @classmethod - def get_config_via_token(cls, token: str) -> dict: - return cls._get_via_cache(token, "config", lambda remote_config: remote_config.build_config()) + def get_config_via_token(cls, token: str, request: Optional[HttpRequest] = None) -> dict: + config = cls._get_via_cache(token, "config", lambda remote_config: remote_config.build_config()) + config = sanitize_config_for_public_cdn(config, request=request) - @classmethod - def get_config_js_via_token(cls, token: str) -> str: - return cls._get_via_cache(token, "config.js", lambda remote_config: remote_config.build_js_config()) + return config @classmethod + def get_config_js_via_token(cls, token: str, request: Optional[HttpRequest] = None) -> str: + config = cls._get_via_cache(token, "config", lambda remote_config: remote_config.build_config()) + # Get the site apps JS so we can render it in the JS + site_apps_js = config.pop("siteAppsJS", None) + # We don't want to include the minimal site apps content as we have the JS now + config.pop("siteApps", None) + config = sanitize_config_for_public_cdn(config, request=request) + + js_content = f"""(function() {{ + window._POSTHOG_CONFIG = {json.dumps(config)}; + window._POSTHOG_JS_APPS = [{','.join(site_apps_js)}]; +}})(); + """.strip() + + return js_content + @classmethod - def get_array_js_via_token(cls, token: str) -> str: + def get_array_js_via_token(cls, token: str, request: Optional[HttpRequest] = None) -> str: # NOTE: Unlike the other methods we don't store this in the cache as it is cheap to build at runtime - data = cls.get_config_js_via_token(token) - - return f""" - {get_array_js_content()} + js_content = cls.get_config_js_via_token(token, request=request) - {data} - """ + return f"""{get_array_js_content()}\n\n{js_content}""" def sync(self): """ @@ -308,11 +361,8 @@ def sync(self): self.config = config cache.set(cache_key_for_team_token(self.team.api_token, "config"), config, timeout=CACHE_TIMEOUT) - cache.set( - cache_key_for_team_token(self.team.api_token, "config.js"), - self.build_js_config(), - timeout=CACHE_TIMEOUT, - ) + + self._purge_cdn() # TODO: Invalidate caches - in particular this will be the Cloudflare CDN cache self.synced_at = timezone.now() @@ -325,6 +375,37 @@ 
CELERY_TASK_REMOTE_CONFIG_SYNC.labels(result="failure").inc() raise + def _purge_cdn(self): + if ( + not settings.REMOTE_CONFIG_CDN_PURGE_ENDPOINT + or not settings.REMOTE_CONFIG_CDN_PURGE_TOKEN + or not settings.REMOTE_CONFIG_CDN_PURGE_DOMAINS + ): + return + + logger.info(f"Purging CDN for team {self.team_id}") + + data: dict[str, Any] = {"files": []} + + for domain in settings.REMOTE_CONFIG_CDN_PURGE_DOMAINS: + # Check if the domain starts with https:// and if not add it + full_domain = domain if domain.startswith("https://") else f"https://{domain}" + data["files"].append({"url": f"{full_domain}/array/{self.team.api_token}/config"}) + data["files"].append({"url": f"{full_domain}/array/{self.team.api_token}/config.js"}) + data["files"].append({"url": f"{full_domain}/array/{self.team.api_token}/array.js"}) + + try: + requests.post( + settings.REMOTE_CONFIG_CDN_PURGE_ENDPOINT, + headers={"Authorization": f"Bearer {settings.REMOTE_CONFIG_CDN_PURGE_TOKEN}"}, + data=data, + ) + except Exception: + logger.exception(f"Failed to purge CDN for team {self.team_id}") + REMOTE_CONFIG_CDN_PURGE_COUNTER.labels(result="failure").inc() + else: + REMOTE_CONFIG_CDN_PURGE_COUNTER.labels(result="success").inc() + def __str__(self): return f"RemoteConfig {self.team_id}" @@ -353,7 +434,7 @@ def site_app_saved(sender, instance: "PluginConfig", created, **kwargs): @receiver(post_save, sender=HogFunction) def site_function_saved(sender, instance: "HogFunction", created, **kwargs): - if instance.enabled and instance.type in ("site_destination", "site_app") and instance.transpiled: + if instance.enabled and instance.type in ("site_destination", "site_app"): _update_team_remote_config(instance.team_id) diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index b87ed07c84ee0e..40bea6d9e089cb 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -519,7 +519,7 @@ def __str__(self): return ", ".join(self.app_urls) return str(self.pk) - __repr__ = sane_repr("uuid", "name", "api_token") + __repr__ = sane_repr("id", "uuid", "project_id", "name", "api_token") @mutable_receiver(post_save, sender=Team) diff --git a/posthog/models/team/team_caching.py b/posthog/models/team/team_caching.py index 78042111a824cb..66309bd9399e23 100644 --- a/posthog/models/team/team_caching.py +++ b/posthog/models/team/team_caching.py @@ -38,6 +38,8 @@ def get_team_in_cache(token: str) -> Optional["Team"]: if team_data: try: parsed_data = json.loads(team_data) + if "project_id" not in parsed_data: + parsed_data["project_id"] = parsed_data["id"] return Team(**parsed_data) except Exception as e: capture_exception(e) diff --git a/posthog/models/test/test_integration_model.py b/posthog/models/test/test_integration_model.py index 456f085d9c2e93..d4184d9e0265a0 100644 --- a/posthog/models/test/test_integration_model.py +++ b/posthog/models/test/test_integration_model.py @@ -120,7 +120,7 @@ def test_authorize_url_with_additional_authorize_params(self): url = OauthIntegration.authorize_url("google-ads", next="/projects/test") assert ( url - == "https://accounts.google.com/o/oauth2/v2/auth?client_id=google-client-id&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fadwords+email&redirect_uri=https%3A%2F%2Flocalhost%3A8000%2Fintegrations%2Fgoogle-ads%2Fcallback&response_type=code&state=next%3D%252Fprojects%252Ftest&access_type=offline&prompt=consent" + == 
"https://accounts.google.com/o/oauth2/v2/auth?client_id=google-client-id&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fadwords+https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fuserinfo.email&redirect_uri=https%3A%2F%2Flocalhost%3A8000%2Fintegrations%2Fgoogle-ads%2Fcallback&response_type=code&state=next%3D%252Fprojects%252Ftest&access_type=offline&prompt=consent" ) @patch("posthog.models.integration.requests.post") @@ -199,6 +199,10 @@ def test_integration_fetches_info_from_token_info_url(self, mock_get, mock_post) "user": "user", "user_id": "user_id", "should_not": "be_saved", + "scopes": [ + "crm.objects.contacts.read", + "crm.objects.contacts.write", + ], } with freeze_time("2024-01-01T12:00:00Z"): @@ -219,6 +223,10 @@ def test_integration_fetches_info_from_token_info_url(self, mock_get, mock_post) "user": "user", "user_id": "user_id", "refreshed_at": 1704110400, + "scopes": [ + "crm.objects.contacts.read", + "crm.objects.contacts.write", + ], } assert integration.sensitive_config == { "access_token": "FAKES_ACCESS_TOKEN", diff --git a/posthog/models/test/test_remote_config.py b/posthog/models/test/test_remote_config.py index 2e118594f8b1f2..7bb985b78de6c3 100644 --- a/posthog/models/test/test_remote_config.py +++ b/posthog/models/test/test_remote_config.py @@ -1,5 +1,6 @@ from decimal import Decimal from unittest.mock import patch +from django.test import RequestFactory from inline_snapshot import snapshot import pytest from posthog.models.action.action import Action @@ -12,6 +13,8 @@ from posthog.test.base import BaseTest from django.core.cache import cache +CONFIG_REFRESH_QUERY_COUNT = 5 + class _RemoteConfigBase(BaseTest): remote_config: RemoteConfig @@ -26,6 +29,8 @@ def setUp(self): ) self.team = team self.team.api_token = "phc_12345" # Easier to test against + self.team.recording_domains = ["https://*.example.com"] + self.team.session_recording_opt_in = True self.team.save() # There will always be a config thanks to the signal @@ -45,14 +50,28 @@ def test_creates_remote_config_immediately(self): "heatmaps": False, "siteApps": [], "analytics": {"endpoint": "/i/v0/e/"}, + "siteAppsJS": [], "hasFeatureFlags": False, - "sessionRecording": False, + "sessionRecording": { + "domains": ["https://*.example.com"], + "endpoint": "/s/", + "linkedFlag": None, + "sampleRate": None, + "urlTriggers": [], + "scriptConfig": None, + "urlBlocklist": [], + "eventTriggers": [], + "recorderVersion": "v2", + "networkPayloadCapture": None, + "consoleLogRecordingEnabled": True, + "minimumDurationMilliseconds": None, + }, "captureDeadClicks": False, "capturePerformance": {"web_vitals": False, "network_timing": True, "web_vitals_allowed_metrics": None}, "autocapture_opt_out": False, "supportedCompression": ["gzip", "gzip-js"], "autocaptureExceptions": False, - "defaultIdentifiedOnly": False, + "defaultIdentifiedOnly": True, "elementsChainAsString": True, } ) @@ -112,6 +131,13 @@ def test_session_recording_sample_rate(self): self.remote_config.refresh_from_db() assert self.remote_config.config["sessionRecording"]["sampleRate"] == "0.50" + def test_session_recording_domains(self): + self.team.session_recording_opt_in = True + self.team.recording_domains = ["https://posthog.com", "https://*.posthog.com"] + self.team.save() + self.remote_config.refresh_from_db() + assert self.remote_config.config["sessionRecording"]["domains"] == self.team.recording_domains + class TestRemoteConfigSurveys(_RemoteConfigBase): # Largely copied from TestSurveysAPIList @@ -255,7 +281,60 @@ def setUp(self): 
self.remote_config.refresh_from_db() # Clear the cache so we are properly testing each flow assert cache.delete(cache_key_for_team_token(self.team.api_token, "config")) - assert cache.delete(cache_key_for_team_token(self.team.api_token, "config.js")) + + def _assert_matches_config(self, data): + assert data == snapshot( + { + "token": "phc_12345", + "supportedCompression": ["gzip", "gzip-js"], + "hasFeatureFlags": False, + "captureDeadClicks": False, + "capturePerformance": {"network_timing": True, "web_vitals": False, "web_vitals_allowed_metrics": None}, + "autocapture_opt_out": False, + "autocaptureExceptions": False, + "analytics": {"endpoint": "/i/v0/e/"}, + "elementsChainAsString": True, + "sessionRecording": { + "endpoint": "/s/", + "consoleLogRecordingEnabled": True, + "recorderVersion": "v2", + "sampleRate": None, + "minimumDurationMilliseconds": None, + "linkedFlag": None, + "networkPayloadCapture": None, + "urlTriggers": [], + "urlBlocklist": [], + "eventTriggers": [], + "scriptConfig": None, + }, + "heatmaps": False, + "surveys": [], + "defaultIdentifiedOnly": True, + "siteApps": [], + } + ) + + def _assert_matches_config_js(self, data): + assert data == snapshot( + """\ +(function() { + window._POSTHOG_CONFIG = {"token": "phc_12345", "supportedCompression": ["gzip", "gzip-js"], "hasFeatureFlags": false, "captureDeadClicks": false, "capturePerformance": {"network_timing": true, "web_vitals": false, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "autocaptureExceptions": false, "analytics": {"endpoint": "/i/v0/e/"}, "elementsChainAsString": true, "sessionRecording": {"endpoint": "/s/", "consoleLogRecordingEnabled": true, "recorderVersion": "v2", "sampleRate": null, "minimumDurationMilliseconds": null, "linkedFlag": null, "networkPayloadCapture": null, "urlTriggers": [], "urlBlocklist": [], "eventTriggers": [], "scriptConfig": null}, "heatmaps": false, "surveys": [], "defaultIdentifiedOnly": true}; + window._POSTHOG_JS_APPS = []; +})();\ +""" + ) + + def _assert_matches_config_array_js(self, data): + assert data == snapshot( + """\ +[MOCKED_ARRAY_JS_CONTENT] + +(function() { + window._POSTHOG_CONFIG = {"token": "phc_12345", "supportedCompression": ["gzip", "gzip-js"], "hasFeatureFlags": false, "captureDeadClicks": false, "capturePerformance": {"network_timing": true, "web_vitals": false, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "autocaptureExceptions": false, "analytics": {"endpoint": "/i/v0/e/"}, "elementsChainAsString": true, "sessionRecording": {"endpoint": "/s/", "consoleLogRecordingEnabled": true, "recorderVersion": "v2", "sampleRate": null, "minimumDurationMilliseconds": null, "linkedFlag": null, "networkPayloadCapture": null, "urlTriggers": [], "urlBlocklist": [], "eventTriggers": [], "scriptConfig": null}, "heatmaps": false, "surveys": [], "defaultIdentifiedOnly": true}; + window._POSTHOG_JS_APPS = []; +})();\ +""" + ) def test_syncs_if_changes(self): synced_at = self.remote_config.synced_at @@ -267,35 +346,42 @@ def test_persists_data_to_redis_on_sync(self): self.remote_config.config["surveys"] = True self.remote_config.sync() assert cache.get(cache_key_for_team_token(self.team.api_token, "config")) - assert cache.get(cache_key_for_team_token(self.team.api_token, "config.js")) def test_gets_via_redis_cache(self): - with self.assertNumQueries(3): + with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): data = RemoteConfig.get_config_via_token(self.team.api_token) - assert data == self.remote_config.config + 
self._assert_matches_config(data) with self.assertNumQueries(0): data = RemoteConfig.get_config_via_token(self.team.api_token) - assert data == self.remote_config.config + self._assert_matches_config(data) def test_gets_js_via_redis_cache(self): - with self.assertNumQueries(3): + with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): data = RemoteConfig.get_config_js_via_token(self.team.api_token) - - assert data == self.remote_config.build_js_config() + self._assert_matches_config_js(data) with self.assertNumQueries(0): data = RemoteConfig.get_config_js_via_token(self.team.api_token) + self._assert_matches_config_js(data) - assert data == self.remote_config.build_js_config() + def test_gets_js_reuses_config_cache(self): + with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): + RemoteConfig.get_config_via_token(self.team.api_token) + + with self.assertNumQueries(0): + data = RemoteConfig.get_config_js_via_token(self.team.api_token) + self._assert_matches_config_js(data) @patch("posthog.models.remote_config.get_array_js_content", return_value="[MOCKED_ARRAY_JS_CONTENT]") def test_gets_array_js_via_redis_cache(self, mock_get_array_js_content): - with self.assertNumQueries(3): - RemoteConfig.get_array_js_via_token(self.team.api_token) + with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): + data = RemoteConfig.get_array_js_via_token(self.team.api_token) + self._assert_matches_config_array_js(data) with self.assertNumQueries(0): - RemoteConfig.get_array_js_via_token(self.team.api_token) + data = RemoteConfig.get_array_js_via_token(self.team.api_token) + self._assert_matches_config_array_js(data) def test_caches_missing_response(self): with self.assertNumQueries(1): @@ -306,19 +392,89 @@ def test_caches_missing_response(self): with pytest.raises(RemoteConfig.DoesNotExist): RemoteConfig.get_array_js_via_token("missing-token") + def test_sanitizes_config_for_public_cdn(self): + config = self.remote_config.get_config_via_token(self.team.api_token) + # Ensure the domain and siteAppsJS are removed + assert config == snapshot( + { + "token": "phc_12345", + "supportedCompression": ["gzip", "gzip-js"], + "hasFeatureFlags": False, + "captureDeadClicks": False, + "capturePerformance": {"network_timing": True, "web_vitals": False, "web_vitals_allowed_metrics": None}, + "autocapture_opt_out": False, + "autocaptureExceptions": False, + "analytics": {"endpoint": "/i/v0/e/"}, + "elementsChainAsString": True, + "sessionRecording": { + "endpoint": "/s/", + "consoleLogRecordingEnabled": True, + "recorderVersion": "v2", + "sampleRate": None, + "minimumDurationMilliseconds": None, + "linkedFlag": None, + "networkPayloadCapture": None, + "urlTriggers": [], + "urlBlocklist": [], + "eventTriggers": [], + "scriptConfig": None, + }, + "heatmaps": False, + "surveys": [], + "defaultIdentifiedOnly": True, + "siteApps": [], + } + ) + + def test_only_includes_recording_for_approved_domains(self): + with self.assertNumQueries(CONFIG_REFRESH_QUERY_COUNT): + mock_request = RequestFactory().get("/") + mock_request.META["HTTP_ORIGIN"] = "https://my.example.com" + config = self.remote_config.get_config_via_token(self.team.api_token, request=mock_request) + assert config["sessionRecording"] + + # No additional queries should be needed to check the other domain + with self.assertNumQueries(0): + mock_request = RequestFactory().get("/") + mock_request.META["HTTP_ORIGIN"] = "https://other.com" + config = self.remote_config.get_config_via_token(self.team.api_token, request=mock_request) + assert not config["sessionRecording"] + 
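# The test below verifies that sync() requests one CDN purge URL per cached artifact
# (config, config.js and array.js) for every entry in REMOTE_CONFIG_CDN_PURGE_DOMAINS,
# normalizing bare domains to https:// first.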
+ @patch("posthog.models.remote_config.requests.post") + def test_purges_cdn_cache_on_sync(self, mock_post): + with self.settings( + REMOTE_CONFIG_CDN_PURGE_ENDPOINT="https://api.cloudflare.com/client/v4/zones/MY_ZONE_ID/purge_cache", + REMOTE_CONFIG_CDN_PURGE_TOKEN="MY_TOKEN", + REMOTE_CONFIG_CDN_PURGE_DOMAINS=["cdn.posthog.com", "https://cdn2.posthog.com"], + ): + self.remote_config.sync() + mock_post.assert_called_once_with( + "https://api.cloudflare.com/client/v4/zones/MY_ZONE_ID/purge_cache", + headers={"Authorization": "Bearer MY_TOKEN"}, + data={ + "files": [ + {"url": "https://cdn.posthog.com/array/phc_12345/config"}, + {"url": "https://cdn.posthog.com/array/phc_12345/config.js"}, + {"url": "https://cdn.posthog.com/array/phc_12345/array.js"}, + {"url": "https://cdn2.posthog.com/array/phc_12345/config"}, + {"url": "https://cdn2.posthog.com/array/phc_12345/config.js"}, + {"url": "https://cdn2.posthog.com/array/phc_12345/array.js"}, + ] + }, + ) + class TestRemoteConfigJS(_RemoteConfigBase): def test_renders_js_including_config(self): # NOTE: This is a very basic test to check that the JS is rendered correctly # It doesn't check the actual contents of the JS, as that changes often but checks some general things - js = self.remote_config.build_config() - js = self.remote_config.build_js_config() + js = self.remote_config.get_config_js_via_token(self.team.api_token) # TODO: Come up with a good way of solidly testing this... assert js == snapshot( """\ (function() { - window._POSTHOG_CONFIG = {"token": "phc_12345", "surveys": [], "heatmaps": false, "siteApps": [], "analytics": {"endpoint": "/i/v0/e/"}, "hasFeatureFlags": false, "sessionRecording": false, "captureDeadClicks": false, "capturePerformance": {"web_vitals": false, "network_timing": true, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "supportedCompression": ["gzip", "gzip-js"], "autocaptureExceptions": false, "defaultIdentifiedOnly": false, "elementsChainAsString": true}; + window._POSTHOG_CONFIG = {"token": "phc_12345", "supportedCompression": ["gzip", "gzip-js"], "hasFeatureFlags": false, "captureDeadClicks": false, "capturePerformance": {"network_timing": true, "web_vitals": false, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "autocaptureExceptions": false, "analytics": {"endpoint": "/i/v0/e/"}, "elementsChainAsString": true, "sessionRecording": {"endpoint": "/s/", "consoleLogRecordingEnabled": true, "recorderVersion": "v2", "sampleRate": null, "minimumDurationMilliseconds": null, "linkedFlag": null, "networkPayloadCapture": null, "urlTriggers": [], "urlBlocklist": [], "eventTriggers": [], "scriptConfig": null}, "heatmaps": false, "surveys": [], "defaultIdentifiedOnly": true}; window._POSTHOG_JS_APPS = []; })();\ """ @@ -355,14 +511,13 @@ def test_renders_js_including_site_apps(self): plugin_configs[2].enabled = False - self.remote_config.build_config() - js = self.remote_config.build_js_config() + js = self.remote_config.get_config_js_via_token(self.team.api_token) # TODO: Come up with a good way of solidly testing this, ideally by running it in an actual browser environment assert js == snapshot( """\ (function() { - window._POSTHOG_CONFIG = {"token": "phc_12345", "surveys": [], "heatmaps": false, "siteApps": [], "analytics": {"endpoint": "/i/v0/e/"}, "hasFeatureFlags": false, "sessionRecording": false, "captureDeadClicks": false, "capturePerformance": {"web_vitals": false, "network_timing": true, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, 
"supportedCompression": ["gzip", "gzip-js"], "autocaptureExceptions": false, "defaultIdentifiedOnly": false, "elementsChainAsString": true}; + window._POSTHOG_CONFIG = {"token": "phc_12345", "supportedCompression": ["gzip", "gzip-js"], "hasFeatureFlags": false, "captureDeadClicks": false, "capturePerformance": {"network_timing": true, "web_vitals": false, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "autocaptureExceptions": false, "analytics": {"endpoint": "/i/v0/e/"}, "elementsChainAsString": true, "sessionRecording": {"endpoint": "/s/", "consoleLogRecordingEnabled": true, "recorderVersion": "v2", "sampleRate": null, "minimumDurationMilliseconds": null, "linkedFlag": null, "networkPayloadCapture": null, "urlTriggers": [], "urlBlocklist": [], "eventTriggers": [], "scriptConfig": null}, "heatmaps": false, "surveys": [], "defaultIdentifiedOnly": true}; window._POSTHOG_JS_APPS = [ { id: 'tokentoken', @@ -391,7 +546,7 @@ def test_renders_js_including_site_apps(self): def test_renders_js_including_site_functions(self): non_site_app = HogFunction.objects.create( - name="Test", + name="Non site app", type=HogFunctionType.DESTINATION, team=self.team, enabled=True, @@ -402,7 +557,7 @@ def test_renders_js_including_site_functions(self): ) site_destination = HogFunction.objects.create( - name="Test", + name="Site destination", type=HogFunctionType.SITE_DESTINATION, team=self.team, enabled=True, @@ -413,14 +568,13 @@ def test_renders_js_including_site_functions(self): ) site_app = HogFunction.objects.create( - name="Test", + name="Site app", type=HogFunctionType.SITE_APP, team=self.team, enabled=True, ) - self.remote_config.build_config() - js = self.remote_config.build_js_config() + js = self.remote_config.get_config_js_via_token(self.team.api_token) assert str(non_site_app.id) not in js assert str(site_destination.id) in js assert str(site_app.id) in js @@ -433,7 +587,7 @@ def test_renders_js_including_site_functions(self): assert js == snapshot( """\ (function() { - window._POSTHOG_CONFIG = {"token": "phc_12345", "surveys": [], "heatmaps": false, "siteApps": [], "analytics": {"endpoint": "/i/v0/e/"}, "hasFeatureFlags": false, "sessionRecording": false, "captureDeadClicks": false, "capturePerformance": {"web_vitals": false, "network_timing": true, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "supportedCompression": ["gzip", "gzip-js"], "autocaptureExceptions": false, "defaultIdentifiedOnly": false, "elementsChainAsString": true}; + window._POSTHOG_CONFIG = {"token": "phc_12345", "supportedCompression": ["gzip", "gzip-js"], "hasFeatureFlags": false, "captureDeadClicks": false, "capturePerformance": {"network_timing": true, "web_vitals": false, "web_vitals_allowed_metrics": null}, "autocapture_opt_out": false, "autocaptureExceptions": false, "analytics": {"endpoint": "/i/v0/e/"}, "elementsChainAsString": true, "sessionRecording": {"endpoint": "/s/", "consoleLogRecordingEnabled": true, "recorderVersion": "v2", "sampleRate": null, "minimumDurationMilliseconds": null, "linkedFlag": null, "networkPayloadCapture": null, "urlTriggers": [], "urlBlocklist": [], "eventTriggers": [], "scriptConfig": null}, "heatmaps": false, "surveys": [], "defaultIdentifiedOnly": true}; window._POSTHOG_JS_APPS = [ { id: 'SITE_DESTINATION_ID', @@ -552,7 +706,8 @@ def test_renders_js_including_site_functions(self): const filterGlobals = { ...globals.groups, ...globals.event, person: globals.person, inputs, pdi: { distinct_id: globals.event.distinct_id, person: globals.person } }; let 
__getGlobal = (key) => filterGlobals[key]; const filterMatches = !!(!!(!ilike(__getProperty(__getProperty(__getGlobal("person"), "properties", true), "email", true), "%@posthog.com%") && ((!match(toString(__getProperty(__getGlobal("properties"), "$host", true)), "^(localhost|127\\\\.0\\\\.0\\\\.1)($|:)")) ?? 1) && (__getGlobal("event") == "$pageview"))); - if (filterMatches) { source.onEvent({ ...globals, inputs, posthog }); } + if (!filterMatches) { return; } + ; } } @@ -560,7 +715,12 @@ def test_renders_js_including_site_functions(self): const posthog = config.posthog; const callback = config.callback; if ('onLoad' in source) { - const r = source.onLoad({ inputs: buildInputs({}, true), posthog: posthog }); + const globals = { + person: { + properties: posthog.get_property('$stored_person_properties'), + } + } + const r = source.onLoad({ inputs: buildInputs(globals, true), posthog: posthog }); if (r && typeof r.then === 'function' && typeof r.finally === 'function') { r.catch(() => callback(false)).then(() => callback(true)) } else { callback(true) } } else { callback(true); @@ -592,7 +752,8 @@ def test_renders_js_including_site_functions(self): const filterGlobals = { ...globals.groups, ...globals.event, person: globals.person, inputs, pdi: { distinct_id: globals.event.distinct_id, person: globals.person } }; let __getGlobal = (key) => filterGlobals[key]; const filterMatches = true; - if (filterMatches) { source.onEvent({ ...globals, inputs, posthog }); } + if (!filterMatches) { return; } + ; } } @@ -600,7 +761,12 @@ def test_renders_js_including_site_functions(self): const posthog = config.posthog; const callback = config.callback; if ('onLoad' in source) { - const r = source.onLoad({ inputs: buildInputs({}, true), posthog: posthog }); + const globals = { + person: { + properties: posthog.get_property('$stored_person_properties'), + } + } + const r = source.onLoad({ inputs: buildInputs(globals, true), posthog: posthog }); if (r && typeof r.then === 'function' && typeof r.finally === 'function') { r.catch(() => callback(false)).then(() => callback(true)) } else { callback(true) } } else { callback(true); diff --git a/posthog/queries/test/__snapshots__/test_trends.ambr b/posthog/queries/test/__snapshots__/test_trends.ambr index 01ab1c2e0e23e4..63cc1b57cf4e86 100644 --- a/posthog/queries/test/__snapshots__/test_trends.ambr +++ b/posthog/queries/test/__snapshots__/test_trends.ambr @@ -859,18 +859,6 @@ # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 - ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, count(*) as count @@ -885,6 +873,58 @@ OFFSET 0 ''' # --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 + ''' + + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-04 23:59:59', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(8) + UNION ALL SELECT toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')) 
as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['other_value', '$$_posthog_breakdown_null_$$', 'value'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT pdi.person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['other_value', '$$_posthog_breakdown_null_$$', 'value']), (['other_value', '$$_posthog_breakdown_null_$$', 'value']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e SAMPLE 1.0 + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') + AND ((event = 'sign up')) + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.10 ''' /* celery:posthog.tasks.tasks.sync_insight_caching_state */ @@ -1048,6 +1088,22 @@ # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 ''' + SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, + count(*) as count + FROM events e SAMPLE 1.0 + WHERE team_id = 99999 + AND event = 'sign up' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2019-12-28 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 + ''' + SELECT groupArray(day_start) as date, groupArray(count) AS total, breakdown_value @@ -1084,9 +1140,9 @@ GROUP BY distinct_id HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id WHERE e.team_id = 99999 + AND event = 'sign up' AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') - AND ((event = 'sign up')) GROUP BY day_start, breakdown_value)) GROUP BY day_start, @@ -1097,22 +1153,6 @@ ORDER BY breakdown_value ''' # --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 - ''' - - SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, - count(*) as count - FROM events e SAMPLE 1.0 - WHERE team_id = 99999 - AND event = 'sign up' - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2019-12-28 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.4 ''' @@ -1718,18 +1758,6 @@ # --- # name: TestTrends.test_person_filtering_in_cohort_in_action ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE 
timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: TestTrends.test_person_filtering_in_cohort_in_action.1 - ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, count(*) as count @@ -1763,7 +1791,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action.2 +# name: TestTrends.test_person_filtering_in_cohort_in_action.1 ''' SELECT groupArray(day_start) as date, @@ -1827,19 +1855,71 @@ ORDER BY breakdown_value ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2 +# name: TestTrends.test_person_filtering_in_cohort_in_action.2 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-04 23:59:59', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(8) + UNION ALL SELECT toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['$$_posthog_breakdown_null_$$', 'value', 'other_value'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['$$_posthog_breakdown_null_$$', 'value', 'other_value']), (['$$_posthog_breakdown_null_$$', 'value', 'other_value']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') + AND ((event = 'sign up' + AND (pdi.person_id IN + (SELECT id + FROM person + WHERE team_id = 99999 + AND id IN + (SELECT id + FROM person + WHERE team_id = 99999 + AND ((has(['some_val'], replaceRegexpAll(JSONExtractRaw(properties, '$some_prop'), '^"|"$', '')))) ) + GROUP BY id + HAVING max(is_deleted) = 0 + AND ((has(['some_val'], replaceRegexpAll(JSONExtractRaw(argMax(person.properties, version), '$some_prop'), '^"|"$', '')))) SETTINGS optimize_aggregation_in_order = 1)))) + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ''' # --- -# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.1 +# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2 ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, @@ -1875,6 +1955,72 @@ OFFSET 0 ''' # --- +# name: 
TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.1 + ''' + + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-04 23:59:59', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(8) + UNION ALL SELECT toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['$$_posthog_breakdown_null_$$', 'value', 'other_value'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['$$_posthog_breakdown_null_$$', 'value', 'other_value']), (['$$_posthog_breakdown_null_$$', 'value', 'other_value']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0)) AS overrides ON e.distinct_id = overrides.distinct_id + WHERE e.team_id = 99999 + AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') + AND notEmpty(e.person_id) + AND ((event = 'sign up' + AND (if(notEmpty(overrides.distinct_id), overrides.person_id, e.person_id) IN + (SELECT id + FROM person + WHERE team_id = 99999 + AND id IN + (SELECT id + FROM person + WHERE team_id = 99999 + AND ((has(['some_val'], replaceRegexpAll(JSONExtractRaw(properties, '$some_prop'), '^"|"$', '')))) ) + GROUP BY id + HAVING max(is_deleted) = 0 + AND ((has(['some_val'], replaceRegexpAll(JSONExtractRaw(argMax(person.properties, version), '$some_prop'), '^"|"$', '')))) SETTINGS optimize_aggregation_in_order = 1)))) + AND notEmpty(e.person_id) + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- # name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.2 ''' @@ -4232,14 +4378,28 @@ # --- # name: TestTrends.test_trends_any_event_total_count ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + + SELECT groupArray(day_start) as date, + groupArray(count) AS total + FROM + (SELECT SUM(total) AS count, + day_start + FROM + (SELECT toUInt16(0) AS total, + toStartOfDay(toDateTime('2020-01-04 23:59:59', 'UTC')) - toIntervalDay(number) AS day_start + FROM numbers(dateDiff('day', toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), toDateTime('2020-01-04 23:59:59', 'UTC'))) + UNION ALL SELECT toUInt16(0) AS total, 
+ toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')) + UNION ALL SELECT count(*) AS total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) AS date + FROM events e + WHERE team_id = 99999 + AND 1 = 1 + AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') + GROUP BY date) + GROUP BY day_start + ORDER BY day_start) ''' # --- # name: TestTrends.test_trends_any_event_total_count.1 @@ -4260,7 +4420,7 @@ toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) AS date FROM events e WHERE team_id = 99999 - AND 1 = 1 + AND event = 'sign up' AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') GROUP BY date) @@ -4296,18 +4456,6 @@ # --- # name: TestTrends.test_trends_breakdown_cumulative ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: TestTrends.test_trends_breakdown_cumulative.1 - ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, count(*) as count @@ -4322,7 +4470,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_trends_breakdown_cumulative.2 +# name: TestTrends.test_trends_breakdown_cumulative.1 ''' SELECT groupArray(day_start) as date, @@ -4382,19 +4530,67 @@ ORDER BY breakdown_value ''' # --- -# name: TestTrends.test_trends_breakdown_cumulative_poe_v2 +# name: TestTrends.test_trends_breakdown_cumulative.2 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-04 23:59:59', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(8) + UNION ALL SELECT toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['$$_posthog_breakdown_null_$$', 'value', 'other_value'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['$$_posthog_breakdown_null_$$', 'value', 'other_value']), (['$$_posthog_breakdown_null_$$', 'value', 'other_value']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi 
ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = 'sign up' + AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ''' # --- -# name: TestTrends.test_trends_breakdown_cumulative_poe_v2.1 +# name: TestTrends.test_trends_breakdown_cumulative_poe_v2 ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, @@ -4418,6 +4614,68 @@ OFFSET 0 ''' # --- +# name: TestTrends.test_trends_breakdown_cumulative_poe_v2.1 + ''' + + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-04 23:59:59', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(8) + UNION ALL SELECT toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['$$_posthog_breakdown_null_$$', 'value', 'other_value'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT if(notEmpty(overrides.distinct_id), overrides.person_id, e.person_id) as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['$$_posthog_breakdown_null_$$', 'value', 'other_value']), (['$$_posthog_breakdown_null_$$', 'value', 'other_value']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0)) AS overrides ON e.distinct_id = overrides.distinct_id + WHERE e.team_id = 99999 + AND event = 'sign up' + AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') + AND notEmpty(e.person_id) + AND notEmpty(e.person_id) ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- # name: TestTrends.test_trends_breakdown_cumulative_poe_v2.2 ''' @@ -4596,18 +4854,6 @@ # --- # name: TestTrends.test_trends_compare_day_interval_relative_range ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age 
- FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: TestTrends.test_trends_compare_day_interval_relative_range.1 - ''' SELECT groupArray(day_start) as date, groupArray(count) AS total @@ -4632,7 +4878,7 @@ ORDER BY day_start) ''' # --- -# name: TestTrends.test_trends_compare_day_interval_relative_range.2 +# name: TestTrends.test_trends_compare_day_interval_relative_range.1 ''' SELECT groupArray(day_start) as date, @@ -4658,6 +4904,32 @@ ORDER BY day_start) ''' # --- +# name: TestTrends.test_trends_compare_day_interval_relative_range.2 + ''' + + SELECT groupArray(day_start) as date, + groupArray(count) AS total + FROM + (SELECT SUM(total) AS count, + day_start + FROM + (SELECT toUInt16(0) AS total, + toStartOfDay(toDateTime('2020-01-04 23:59:59', 'UTC')) - toIntervalDay(number) AS day_start + FROM numbers(dateDiff('day', toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), toDateTime('2020-01-04 23:59:59', 'UTC'))) + UNION ALL SELECT toUInt16(0) AS total, + toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')) + UNION ALL SELECT count(*) AS total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) AS date + FROM events e + WHERE team_id = 99999 + AND event = 'sign up' + AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') + GROUP BY date) + GROUP BY day_start + ORDER BY day_start) + ''' +# --- # name: TestTrends.test_trends_compare_day_interval_relative_range.3 ''' @@ -4967,14 +5239,28 @@ # --- # name: TestTrends.test_trends_per_day_cumulative ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + + SELECT groupArray(day_start) as date, + groupArray(count) AS total + FROM + (SELECT SUM(total) AS count, + day_start + FROM + (SELECT toUInt16(0) AS total, + toStartOfDay(toDateTime('2020-01-04 23:59:59', 'UTC')) - toIntervalDay(number) AS day_start + FROM numbers(dateDiff('day', toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), toDateTime('2020-01-04 23:59:59', 'UTC'))) + UNION ALL SELECT toUInt16(0) AS total, + toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')) + UNION ALL SELECT count(*) AS total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) AS date + FROM events e + WHERE team_id = 99999 + AND event = 'sign up' + AND toTimeZone(timestamp, 'UTC') >= toDateTime(toStartOfDay(toDateTime('2019-12-28 00:00:00', 'UTC')), 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-04 23:59:59', 'UTC') + GROUP BY date) + GROUP BY day_start + ORDER BY day_start) ''' # --- # name: TestTrends.test_trends_per_day_cumulative.1 diff --git a/posthog/rbac/test/test_user_access_control.py b/posthog/rbac/test/test_user_access_control.py index d398f246589a59..d17de05dd7b446 100644 --- a/posthog/rbac/test/test_user_access_control.py +++ b/posthog/rbac/test/test_user_access_control.py @@ -281,7 +281,9 @@ def test_filters_project_queryset_based_on_acs_always_allows_org_admin(self): filtered_teams = list( self.user_access_control.filter_queryset_by_access_level(Team.objects.all(), include_all_if_admin=True) ) - assert filtered_teams == [self.team, team2, team3] + assert sorted(filtered_teams, key=lambda team: team.id) == sorted( + [self.team, team2, 
team3], key=lambda team: team.id + ) def test_organization_access_control(self): # A team isn't always available like for organization level routing diff --git a/posthog/schema.py b/posthog/schema.py index e01ba99563750d..4bc3d81952e91f 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -420,6 +420,7 @@ class BaseMathType(StrEnum): MONTHLY_ACTIVE = "monthly_active" UNIQUE_SESSION = "unique_session" FIRST_TIME_FOR_USER = "first_time_for_user" + FIRST_MATCHING_EVENT_FOR_USER = "first_matching_event_for_user" class BreakdownAttributionType(StrEnum): @@ -5823,6 +5824,9 @@ class TrendsQuery(BaseModel): aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation") breakdownFilter: Optional[BreakdownFilter] = Field(default=None, description="Breakdown of the events and actions") compareFilter: Optional[CompareFilter] = Field(default=None, description="Compare to date range") + conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = Field( + default=None, description="Whether we should be comparing against a specific conversion goal" + ) dateRange: Optional[InsightDateRange] = Field(default=None, description="Date range for the query") filterTestAccounts: Optional[bool] = Field( default=False, description="Exclude internal and test users by applying the respective filters" @@ -5889,6 +5893,7 @@ class CachedExperimentTrendsQueryResponse(BaseModel): ) significance_code: ExperimentSignificanceCode significant: bool + stats_version: Optional[int] = None timezone: str variants: list[ExperimentVariantTrendsBaseStats] @@ -5906,6 +5911,7 @@ class Response10(BaseModel): probability: dict[str, float] significance_code: ExperimentSignificanceCode significant: bool + stats_version: Optional[int] = None variants: list[ExperimentVariantTrendsBaseStats] @@ -6010,6 +6016,7 @@ class ExperimentTrendsQueryResponse(BaseModel): probability: dict[str, float] significance_code: ExperimentSignificanceCode significant: bool + stats_version: Optional[int] = None variants: list[ExperimentVariantTrendsBaseStats] @@ -6338,6 +6345,7 @@ class QueryResponseAlternative16(BaseModel): probability: dict[str, float] significance_code: ExperimentSignificanceCode significant: bool + stats_version: Optional[int] = None variants: list[ExperimentVariantTrendsBaseStats] @@ -6369,6 +6377,7 @@ class QueryResponseAlternative27(BaseModel): probability: dict[str, float] significance_code: ExperimentSignificanceCode significant: bool + stats_version: Optional[int] = None variants: list[ExperimentVariantTrendsBaseStats] @@ -6552,6 +6561,7 @@ class ExperimentTrendsQuery(BaseModel): ) name: Optional[str] = None response: Optional[ExperimentTrendsQueryResponse] = None + stats_version: Optional[int] = None class FunnelPathsFilter(BaseModel): diff --git a/posthog/session_recordings/queries/session_recording_list_from_query.py b/posthog/session_recordings/queries/session_recording_list_from_query.py index c75d5412fcb0f5..d53c99682edadc 100644 --- a/posthog/session_recordings/queries/session_recording_list_from_query.py +++ b/posthog/session_recordings/queries/session_recording_list_from_query.py @@ -303,7 +303,10 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]: if events_sub_query: optional_exprs.append( ast.CompareOperation( - op=ast.CompareOperationOp.In, + # this hits the distributed events table from the distributed session_replay_events table + # so we should use GlobalIn + # see https://clickhouse.com/docs/en/sql-reference/operators/in#distributed-subqueries + 
op=ast.CompareOperationOp.GlobalIn, left=ast.Field(chain=["s", "session_id"]), right=events_sub_query, ) @@ -498,7 +501,10 @@ def get_operation(self) -> CompareOperation | None: if poe_is_active(self._team): return ast.CompareOperation( - op=ast.CompareOperationOp.In, + # this hits the distributed events table from the distributed session_replay_events table + # so we should use GlobalIn + # see https://clickhouse.com/docs/en/sql-reference/operators/in#distributed-subqueries + op=ast.CompareOperationOp.GlobalIn, left=ast.Field(chain=["session_id"]), right=q, ) diff --git a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr index 610866ee9dc7b5..df35cd7773af40 100644 --- a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr +++ b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr @@ -19,12 +19,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2023-01-03 23:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 00:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'custom-event'), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0), ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0)))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 00:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 
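
A note on the two GlobalIn hunks above: in both places the right-hand subquery selects from the distributed events table while the outer query reads the distributed session_replay_events table. With a plain IN, ClickHouse evaluates the subquery on each shard against that shard's local data only, so session IDs whose events landed on other shards would silently be dropped; GLOBAL IN runs the subquery once on the initiating node and broadcasts the result set to every shard (see the ClickHouse docs URL in the comment). A minimal sketch of the same AST construction, assuming "from posthog.hogql import ast" is the right import and that events_sub_query is a prebuilt ast.SelectQuery (the helper name here is hypothetical, not from the patch):

    from posthog.hogql import ast

    def session_ids_in_events(events_sub_query: ast.SelectQuery) -> ast.CompareOperation:
        # GlobalIn rather than In: the subquery hits a distributed table from
        # another distributed table, so it must be evaluated once on the
        # initiator and shipped to all shards, instead of being re-run per
        # shard over local data only (which could miss matching sessions).
        return ast.CompareOperation(
            op=ast.CompareOperationOp.GlobalIn,
            left=ast.Field(chain=["s", "session_id"]),
            right=events_sub_query,
        )

This prints as globalIn(s.session_id, (SELECT ...)) in the generated SQL, which is exactly the in(...) → globalIn(...) change visible throughout the surrounding session-recording snapshot hunks.
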
00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'custom-event'), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0), ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0)))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -59,12 +59,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2023-01-03 23:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 00:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'custom-event'), and(ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0)))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 00:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'custom-event'), and(ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 
'test_action_filter-window-id'), 0)))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -99,12 +99,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2023-01-03 23:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 00:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(and(equals(events.event, 'custom-event'), and(ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0))), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 00:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(and(equals(events.event, 'custom-event'), and(ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0))), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -139,12 +139,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), 
toDateTime64('2023-01-03 23:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 00:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(and(equals(events.event, 'custom-event'), and(ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0))), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 00:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(and(equals(events.event, 'custom-event'), and(ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0))), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -179,21 +179,21 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), 
dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-22 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-04 00:00:00.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-22 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-04 00:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-21 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-04 00:00:00.000000', 6, 'UTC')), or(equals(events.event, 'custom-event'), equals(events.event, '$pageview'))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'custom-event'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), 
ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-22 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-04 00:00:00.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-22 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-04 00:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-21 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-04 00:00:00.000000', 6, 'UTC')), or(equals(events.event, 'custom-event'), equals(events.event, '$pageview'))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'custom-event'])))) GROUP BY s.session_id HAVING ifNull(greater(duration, 60.0), 0) ORDER BY start_time DESC @@ -228,12 +228,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), 1) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT 
events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), 1) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -268,12 +268,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -308,12 +308,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', 
start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0))) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -348,12 +348,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), 
now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), 1)
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), 1)
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -388,12 +388,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0)))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0)))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -428,12 +428,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Firefox'), 0)))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Firefox'), 0)))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1133,12 +1133,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1173,12 +1173,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$autocapture'))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$autocapture']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$autocapture'))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$autocapture']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1213,12 +1213,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1288,12 +1288,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING ifNull(greater(duration, 60.0), 0)
   ORDER BY start_time DESC
@@ -1328,12 +1328,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING ifNull(greater(active_seconds, 60.0), 0)
   ORDER BY start_time DESC
@@ -1368,17 +1368,17 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT JOIN
-       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'name'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___name, groups.group_type_index AS index, groups.group_key AS key
-        FROM groups
-        WHERE and(equals(groups.team_id, 99999), equals(index, 1))
-        GROUP BY groups.group_type_index, groups.group_key) AS events__group_1 ON equals(events.`$group_1`, events__group_1.key)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(events__group_1.properties___name, 'org one'), 0)))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT JOIN
+       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'name'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___name, groups.group_type_index AS index, groups.group_key AS key
+        FROM groups
+        WHERE and(equals(groups.team_id, 99999), equals(index, 1))
+        GROUP BY groups.group_type_index, groups.group_key) AS events__group_1 ON equals(events.`$group_1`, events__group_1.key)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(events__group_1.properties___name, 'org one'), 0)))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1413,17 +1413,17 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT JOIN
-       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'name'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___name, groups.group_type_index AS index, groups.group_key AS key
-        FROM groups
-        WHERE and(equals(groups.team_id, 99999), equals(index, 1))
-        GROUP BY groups.group_type_index, groups.group_key) AS events__group_1 ON equals(events.`$group_1`, events__group_1.key)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__group_1.properties___name, 'org one'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT JOIN
+       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'name'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___name, groups.group_type_index AS index, groups.group_key AS key
+        FROM groups
+        WHERE and(equals(groups.team_id, 99999), equals(index, 1))
+        GROUP BY groups.group_type_index, groups.group_key) AS events__group_1 ON equals(events.`$group_1`, events__group_1.key)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__group_1.properties___name, 'org one'), 0))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
  ORDER BY start_time DESC
@@ -1458,17 +1458,17 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT JOIN
-       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'name'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___name, groups.group_type_index AS index, groups.group_key AS key
-        FROM groups
-        WHERE and(equals(groups.team_id, 99999), equals(index, 2))
-        GROUP BY groups.group_type_index, groups.group_key) AS events__group_2 ON equals(events.`$group_2`, events__group_2.key)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__group_2.properties___name, 'org one'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT JOIN
+       (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'name'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___name, groups.group_type_index AS index, groups.group_key AS key
+        FROM groups
+        WHERE and(equals(groups.team_id, 99999), equals(index, 2))
+        GROUP BY groups.group_type_index, groups.group_key) AS events__group_2 ON equals(events.`$group_2`, events__group_2.key)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__group_2.properties___name, 'org one'), 0))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1503,12 +1503,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1543,27 +1543,27 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT OUTER JOIN
+       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+        FROM person_distinct_id_overrides
+        WHERE equals(person_distinct_id_overrides.team_id, 99999)
+        GROUP BY person_distinct_id_overrides.distinct_id
+        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+     LEFT JOIN
+       (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
+        FROM person
+        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+          (SELECT person.id AS id, max(person.version) AS version
+           FROM person
+           WHERE equals(person.team_id, 99999)
+           GROUP BY person.id
+           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'bla'), 0))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1598,27 +1598,27 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0), ifNull(equals(events__person.properties___email, 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT OUTER JOIN
+       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+        FROM person_distinct_id_overrides
+        WHERE equals(person_distinct_id_overrides.team_id, 99999)
+        GROUP BY person_distinct_id_overrides.distinct_id
+        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+     LEFT JOIN
+       (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
+        FROM person
+        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+          (SELECT person.id AS id, max(person.version) AS version
+           FROM person
+           WHERE equals(person.team_id, 99999)
+           GROUP BY person.id
+           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0), ifNull(equals(events__person.properties___email, 'bla'), 0))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1653,12 +1653,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1693,27 +1693,27 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT OUTER JOIN
+       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+        FROM person_distinct_id_overrides
+        WHERE equals(person_distinct_id_overrides.team_id, 99999)
+        GROUP BY person_distinct_id_overrides.distinct_id
+        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+     LEFT JOIN
+       (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email
+        FROM person
+        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+          (SELECT person.id AS id, max(person.version) AS version
+           FROM person
+           WHERE equals(person.team_id, 99999)
+           GROUP BY person.id
+           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'bla'), 0))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1748,27 +1748,27 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0), ifNull(equals(events__person.properties___email, 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT OUTER JOIN
+       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+        FROM person_distinct_id_overrides
+        WHERE equals(person_distinct_id_overrides.team_id, 99999)
+        GROUP BY person_distinct_id_overrides.distinct_id
+        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+     LEFT JOIN
+       (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email
+        FROM person
+        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+          (SELECT person.id AS id, max(person.version) AS version
+           FROM person
+           WHERE equals(person.team_id, 99999)
+           GROUP BY person.id
+           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0), ifNull(equals(events__person.properties___email, 'bla'), 0))
+     GROUP BY events.`$session_id`
+     HAVING 1)))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1803,27 +1803,27 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'bla'), 0)))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT OUTER JOIN
+       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+        FROM person_distinct_id_overrides
+        WHERE equals(person_distinct_id_overrides.team_id, 99999)
+        GROUP BY person_distinct_id_overrides.distinct_id
+        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+     LEFT JOIN
+       (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
+        FROM person
+        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+          (SELECT person.id AS id, max(person.version) AS version
+           FROM person
+           WHERE equals(person.team_id, 99999)
+           GROUP BY person.id
+           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'bla'), 0)))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1858,27 +1858,27 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'something else'), 0)))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     LEFT OUTER JOIN
+       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+        FROM person_distinct_id_overrides
+        WHERE equals(person_distinct_id_overrides.team_id, 99999)
+        GROUP BY person_distinct_id_overrides.distinct_id
+        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+     LEFT JOIN
+       (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
+        FROM person
+        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+          (SELECT person.id AS id, max(person.version) AS version
+           FROM person
+           WHERE equals(person.team_id, 99999)
+           GROUP BY person.id
+           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'something else'), 0)))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1913,12 +1913,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1953,12 +1953,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0)))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+    (SELECT events.`$session_id` AS session_id
+     FROM events
+     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0)))
+     GROUP BY events.`$session_id`
+     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
   GROUP BY s.session_id
   HAVING 1
   ORDER BY start_time DESC
@@ -1993,12 +1993,12 @@
   ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
   round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
   FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0)))
-     GROUP BY events.`$session_id`
-     HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0),
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2033,12 +2033,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Firefox'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time 
DESC @@ -2073,12 +2073,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2113,12 +2113,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT 
events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$autocapture')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$autocapture'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$autocapture')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$autocapture'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2153,12 +2153,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2193,12 +2193,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 
'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2233,12 +2233,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2273,12 +2273,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM 
session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Safari'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Safari'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2313,12 +2313,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 
'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2353,12 +2353,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Firefox'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, 
+ (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2393,12 +2393,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2433,12 +2433,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 
13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Safari'), 0))) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Safari'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2473,27 +2473,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email 
+ FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2528,12 +2528,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE 
and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2568,27 +2568,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), and(ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)) - 
GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), and(ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2623,12 +2623,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-        GROUP BY events.`$session_id`
-        HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+        GROUP BY events.`$session_id`
+        HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -2663,12 +2663,12 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, '$pageleave')))
-        GROUP BY events.`$session_id`
-        HAVING hasAll(groupUniqArray(events.event), ['$pageleave', '$pageview']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, '$pageleave')))
+        GROUP BY events.`$session_id`
+        HAVING hasAll(groupUniqArray(events.event), ['$pageleave', '$pageview']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -3434,26 +3434,26 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-        GROUP BY events.`$session_id`
-        HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id,
-       (SELECT person_distinct_id2.distinct_id AS distinct_id
-        FROM person_distinct_id2
-        WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id,
-              (SELECT person_distinct_id2.distinct_id AS distinct_id
-               FROM person_distinct_id2
-               WHERE and(equals(person_distinct_id2.team_id, 99999), 1, in(person_distinct_id2.person_id,
-                     (SELECT cohortpeople.person_id AS person_id
-                      FROM cohortpeople
-                      WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0))))))))
-        GROUP BY person_distinct_id2.distinct_id
-        HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), in(person_distinct_id2.person_id,
-              (SELECT cohortpeople.person_id AS person_id
-               FROM cohortpeople
-               WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), and(globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+        GROUP BY events.`$session_id`
+        HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id,
+       (SELECT person_distinct_id2.distinct_id AS distinct_id
+        FROM person_distinct_id2
+        WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id,
+              (SELECT person_distinct_id2.distinct_id AS distinct_id
+               FROM person_distinct_id2
+               WHERE and(equals(person_distinct_id2.team_id, 99999), 1, in(person_distinct_id2.person_id,
+                     (SELECT cohortpeople.person_id AS person_id
+                      FROM cohortpeople
+                      WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0))))))))
+        GROUP BY person_distinct_id2.distinct_id
+        HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), in(person_distinct_id2.person_id,
+              (SELECT cohortpeople.person_id AS person_id
+               FROM cohortpeople
+               WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -3488,26 +3488,26 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-17 23:58:00.000000', 6, 'UTC')), equals(events.event, 'custom_event'))
-        GROUP BY events.`$session_id`
-        HAVING hasAll(groupUniqArray(events.event), ['custom_event']))), in(s.distinct_id,
-       (SELECT person_distinct_id2.distinct_id AS distinct_id
-        FROM person_distinct_id2
-        WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id,
-              (SELECT person_distinct_id2.distinct_id AS distinct_id
-               FROM person_distinct_id2
-               WHERE and(equals(person_distinct_id2.team_id, 99999), 1, in(person_distinct_id2.person_id,
-                     (SELECT cohortpeople.person_id AS person_id
-                      FROM cohortpeople
-                      WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0))))))))
-        GROUP BY person_distinct_id2.distinct_id
-        HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), in(person_distinct_id2.person_id,
-              (SELECT cohortpeople.person_id AS person_id
-               FROM cohortpeople
-               WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), and(globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-17 23:58:00.000000', 6, 'UTC')), equals(events.event, 'custom_event'))
+        GROUP BY events.`$session_id`
+        HAVING hasAll(groupUniqArray(events.event), ['custom_event']))), in(s.distinct_id,
+       (SELECT person_distinct_id2.distinct_id AS distinct_id
+        FROM person_distinct_id2
+        WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id,
+              (SELECT person_distinct_id2.distinct_id AS distinct_id
+               FROM person_distinct_id2
+               WHERE and(equals(person_distinct_id2.team_id, 99999), 1, in(person_distinct_id2.person_id,
+                     (SELECT cohortpeople.person_id AS person_id
+                      FROM cohortpeople
+                      WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0))))))))
+        GROUP BY person_distinct_id2.distinct_id
+        HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), in(person_distinct_id2.person_id,
+              (SELECT cohortpeople.person_id AS person_id
+               FROM cohortpeople
+               WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))))
     GROUP BY s.session_id
    HAVING 1
     ORDER BY start_time DESC
@@ -3542,27 +3542,27 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        LEFT OUTER JOIN
-          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-           FROM person_distinct_id_overrides
-           WHERE equals(person_distinct_id_overrides.team_id, 99999)
-           GROUP BY person_distinct_id_overrides.distinct_id
-           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-        LEFT JOIN
-          (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
-           FROM person
-           WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-                 (SELECT person.id AS id, max(person.version) AS version
-                  FROM person
-                  WHERE equals(person.team_id, 99999)
-                  GROUP BY person.id
-                  HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla@gmail.com'), 0))
-        GROUP BY events.`$session_id`
-        HAVING 1)))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        LEFT OUTER JOIN
+          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+           FROM person_distinct_id_overrides
+           WHERE equals(person_distinct_id_overrides.team_id, 99999)
+           GROUP BY person_distinct_id_overrides.distinct_id
+           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+        LEFT JOIN
+          (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
+           FROM person
+           WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+                 (SELECT person.id AS id, max(person.version) AS version
+                  FROM person
+                  WHERE equals(person.team_id, 99999)
+                  GROUP BY person.id
+                  HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla@gmail.com'), 0))
+        GROUP BY events.`$session_id`
+        HAVING 1)))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -3597,27 +3597,27 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        LEFT OUTER JOIN
-          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-           FROM person_distinct_id_overrides
-           WHERE equals(person_distinct_id_overrides.team_id, 99999)
-           GROUP BY person_distinct_id_overrides.distinct_id
-           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-        LEFT JOIN
-          (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
-           FROM person
-           WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-                 (SELECT person.id AS id, max(person.version) AS version
-                  FROM person
-                  WHERE equals(person.team_id, 99999)
-                  GROUP BY person.id
-                  HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(notILike(events__person.properties___email, '%gmail.com%'), 1))
-        GROUP BY events.`$session_id`
-        HAVING 1)))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        LEFT OUTER JOIN
+          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+           FROM person_distinct_id_overrides
+           WHERE equals(person_distinct_id_overrides.team_id, 99999)
+           GROUP BY person_distinct_id_overrides.distinct_id
+           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+        LEFT JOIN
+          (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
+           FROM person
+           WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+                 (SELECT person.id AS id, max(person.version) AS version
+                  FROM person
+                  WHERE equals(person.team_id, 99999)
+                  GROUP BY person.id
+                  HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(notILike(events__person.properties___email, '%gmail.com%'), 1))
+        GROUP BY events.`$session_id`
+        HAVING 1)))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -3889,12 +3889,12 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'new-event')))
-        GROUP BY events.`$session_id`
-        HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'new-event']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'new-event')))
+        GROUP BY events.`$session_id`
+        HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'new-event']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -3929,12 +3929,12 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'new-event2')))
-        GROUP BY events.`$session_id`
-        HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'new-event2']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'new-event2')))
+        GROUP BY events.`$session_id`
+        HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'new-event2']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -3969,12 +3969,12 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'new-event2')))
-        GROUP BY events.`$session_id`
-        HAVING hasAny(groupUniqArray(events.event), ['$pageview', 'new-event2']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'new-event2')))
+        GROUP BY events.`$session_id`
+        HAVING hasAny(groupUniqArray(events.event), ['$pageview', 'new-event2']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4009,12 +4009,12 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'bar'), ''), 'null'), '^"|"$', ''), 'foo'), 0))))
-        GROUP BY events.`$session_id`
-        HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'bar'), ''), 'null'), '^"|"$', ''), 'foo'), 0))))
+        GROUP BY events.`$session_id`
+        HAVING hasAll(groupUniqArray(events.event), ['$pageview']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4049,12 +4049,12 @@
            ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)), and(equals(events.event, 'new-event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0))))
-        GROUP BY events.`$session_id`
-        HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'new-event']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)), and(equals(events.event, 'new-event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0))))
+        GROUP BY events.`$session_id`
+        HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'new-event']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4089,12 +4089,12 @@
           ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)), and(equals(events.event, 'new-event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0))))
-        GROUP BY events.`$session_id`
-        HAVING hasAny(groupUniqArray(events.event), ['$pageview', 'new-event']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)), and(equals(events.event, 'new-event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0))))
+        GROUP BY events.`$session_id`
+        HAVING hasAny(groupUniqArray(events.event), ['$pageview', 'new-event']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4129,12 +4129,12 @@
           ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'custom_event')))
-        GROUP BY events.`$session_id`
-        HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'custom_event']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'custom_event')))
+        GROUP BY events.`$session_id`
+        HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'custom_event']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4169,12 +4169,12 @@
           ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'custom_event')))
-        GROUP BY events.`$session_id`
-        HAVING hasAny(groupUniqArray(events.event), ['$pageview', 'custom_event']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'custom_event')))
+        GROUP BY events.`$session_id`
+        HAVING hasAny(groupUniqArray(events.event), ['$pageview', 'custom_event']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4414,21 +4414,21 @@
           ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT DISTINCT events.`$session_id` AS `$session_id`
-        FROM events
-        LEFT OUTER JOIN
-          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-           FROM person_distinct_id_overrides
-           WHERE equals(person_distinct_id_overrides.team_id, 99999)
-           GROUP BY person_distinct_id_overrides.distinct_id
-           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-        WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-        GROUP BY events.`$session_id`
-        HAVING hasAny(groupUniqArray(events.event), ['$pageview']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT DISTINCT events.`$session_id` AS `$session_id`
+        FROM events
+        LEFT OUTER JOIN
+          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+           FROM person_distinct_id_overrides
+           WHERE equals(person_distinct_id_overrides.team_id, 99999)
+           GROUP BY person_distinct_id_overrides.distinct_id
+           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+        WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+        GROUP BY events.`$session_id`
+        HAVING hasAny(groupUniqArray(events.event), ['$pageview']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4463,21 +4463,21 @@
           ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
            round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT DISTINCT events.`$session_id` AS `$session_id`
-        FROM events
-        LEFT OUTER JOIN
-          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-           FROM person_distinct_id_overrides
-           WHERE equals(person_distinct_id_overrides.team_id, 99999)
-           GROUP BY person_distinct_id_overrides.distinct_id
-           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-        WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
-        GROUP BY events.`$session_id`
-        HAVING hasAny(groupUniqArray(events.event), ['$pageview']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT DISTINCT events.`$session_id` AS `$session_id`
+        FROM events
+        LEFT OUTER JOIN
+          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+           FROM person_distinct_id_overrides
+           WHERE equals(person_distinct_id_overrides.team_id, 99999)
+           GROUP BY person_distinct_id_overrides.distinct_id
+           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+        WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'))
+        GROUP BY events.`$session_id`
+        HAVING hasAny(groupUniqArray(events.event), ['$pageview']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4512,12 +4512,12 @@
           ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
           round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, ['session_id_one']), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), in(events.`$session_id`, ['session_id_one']))
-        GROUP BY events.`$session_id`
-        HAVING hasAny(groupUniqArray(events.event), ['$pageview']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, ['session_id_one']), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), in(events.`$session_id`, ['session_id_one']))
+        GROUP BY events.`$session_id`
+        HAVING hasAny(groupUniqArray(events.event), ['$pageview']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4552,12 +4552,12 @@
           ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
           round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, ['session_id_two']), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), in(events.`$session_id`, ['session_id_two']))
-        GROUP BY events.`$session_id`
-        HAVING hasAny(groupUniqArray(events.event), ['$pageview']))))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, ['session_id_two']), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), in(events.`$session_id`, ['session_id_two']))
+        GROUP BY events.`$session_id`
+        HAVING hasAny(groupUniqArray(events.event), ['$pageview']))))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4592,27 +4592,27 @@
          ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
           round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        LEFT OUTER JOIN
-          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-           FROM person_distinct_id_overrides
-           WHERE equals(person_distinct_id_overrides.team_id, 99999)
-           GROUP BY person_distinct_id_overrides.distinct_id
-           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-        LEFT JOIN
-          (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
-           FROM person
-           WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-                 (SELECT person.id AS id, max(person.version) AS version
-                  FROM person
-                  WHERE equals(person.team_id, 99999)
-                  GROUP BY person.id
-                  HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(ifNull(equals(events__person.properties___email, 'test@posthog.com'), 0), ifNull(equals(events__person.properties___email, 'david@posthog.com'), 0)))
-        GROUP BY events.`$session_id`
-        HAVING 1)))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        LEFT OUTER JOIN
+          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+           FROM person_distinct_id_overrides
+           WHERE equals(person_distinct_id_overrides.team_id, 99999)
+           GROUP BY person_distinct_id_overrides.distinct_id
+           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+        LEFT JOIN
+          (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
+           FROM person
+           WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+                 (SELECT person.id AS id, max(person.version) AS version
+                  FROM person
+                  WHERE equals(person.team_id, 99999)
+                  GROUP BY person.id
+                  HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(ifNull(equals(events__person.properties___email, 'test@posthog.com'), 0), ifNull(equals(events__person.properties___email, 'david@posthog.com'), 0)))
+        GROUP BY events.`$session_id`
+        HAVING 1)))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4647,27 +4647,27 @@
         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
           round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT events.`$session_id` AS session_id
-        FROM events
-        LEFT OUTER JOIN
-          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-           FROM person_distinct_id_overrides
-           WHERE equals(person_distinct_id_overrides.team_id, 99999)
-           GROUP BY person_distinct_id_overrides.distinct_id
-           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-        LEFT JOIN
-          (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
-           FROM person
-           WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-                 (SELECT person.id AS id, max(person.version) AS version
-                  FROM person
-                  WHERE equals(person.team_id, 99999)
-                  GROUP BY person.id
-                  HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(ifNull(equals(events__person.properties___email, 'test@posthog.com'), 0), ifNull(equals(events__person.properties___email, 'david@posthog.com'), 0)))
-        GROUP BY events.`$session_id`
-        HAVING 1)))
+    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+       (SELECT events.`$session_id` AS session_id
+        FROM events
+        LEFT OUTER JOIN
+          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+           FROM person_distinct_id_overrides
+           WHERE equals(person_distinct_id_overrides.team_id, 99999)
+           GROUP BY person_distinct_id_overrides.distinct_id
+           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+        LEFT JOIN
+          (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email
+           FROM person
+           WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
+                 (SELECT person.id AS id, max(person.version) AS version
+                  FROM person
+                  WHERE equals(person.team_id, 99999)
+                  GROUP BY person.id
+                  HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+        WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(ifNull(equals(events__person.properties___email, 'test@posthog.com'), 0), ifNull(equals(events__person.properties___email, 'david@posthog.com'), 0)))
+        GROUP BY events.`$session_id`
+        HAVING 1)))
     GROUP BY s.session_id
     HAVING 1
     ORDER BY start_time DESC
@@ -4772,16 +4772,16 @@
        ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
           round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
     FROM session_replay_events AS s
-    WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-       (SELECT DISTINCT events.`$session_id` AS `$session_id`
-        FROM events
-        LEFT OUTER JOIN
-          (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-           FROM person_distinct_id_overrides
-           WHERE equals(person_distinct_id_overrides.team_id, 99999)
-           GROUP BY person_distinct_id_overrides.distinct_id
-           HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted,
person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -4851,12 +4851,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 
0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -4891,12 +4891,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(not(match(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$host'), ''), 'null'), '^"|"$', '')), '^(localhost|127\\.0\\.0\\.1)($|:)')), 1)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 
'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(not(match(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$host'), ''), 'null'), '^"|"$', '')), '^(localhost|127\\.0\\.0\\.1)($|:)')), 1)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -4931,12 +4931,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 
ORDER BY start_time DESC @@ -4971,12 +4971,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(not(match(toString(nullIf(nullIf(events.`mat_$host`, ''), 'null')), '^(localhost|127\\.0\\.0\\.1)($|:)')), 1)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(not(match(toString(nullIf(nullIf(events.`mat_$host`, ''), 'null')), '^(localhost|127\\.0\\.0\\.1)($|:)')), 1)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5011,12 +5011,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5051,12 +5051,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5091,12 +5091,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5131,12 +5131,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5171,12 +5171,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE 
and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5211,12 +5211,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE 
and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5251,12 +5251,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5291,12 +5291,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), 
sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5331,12 +5331,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - 
HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5371,12 +5371,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'true'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), 
ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'true'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5411,12 +5411,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5451,12 +5451,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'true'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'true'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5491,12 +5491,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5531,27 +5531,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5586,12 +5586,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - 
WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5626,27 +5626,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), 
ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5681,12 +5681,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), 
sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5721,27 +5721,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, 
person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 
'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5776,12 +5776,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) - GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -5816,27 +5816,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), 
person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py index 0b9ea6e40d48c9..ae58b14213ed20 100644 --- a/posthog/session_recordings/session_recording_api.py +++ b/posthog/session_recordings/session_recording_api.py @@ -21,6 +21,7 @@ from rest_framework.response import Response from rest_framework.utils.encoders import JSONEncoder +import posthog.session_recordings.queries.session_recording_list_from_query from ee.session_recordings.session_summary.summarize_session import summarize_recording from posthog.api.person import MinimalPersonSerializer from posthog.api.routing import TeamAndOrgViewSetMixin @@ -360,31 +361,54 @@ def list(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: ) @action(methods=["GET"], detail=False) def matching_events(self, request: request.Request, *args: Any, **kwargs: Any) -> JsonResponse: - filter = SessionRecordingsFilter(request=request, team=self.team) + use_query_type = (request.GET.get("as_query", "False")).lower() == "true" - if not filter.session_ids or len(filter.session_ids) != 1: - raise exceptions.ValidationError( - "Must specify exactly one session_id", - ) + if use_query_type: + data_dict = query_as_params_to_dict(request.GET.dict()) + query = RecordingsQuery.model_validate(data_dict) + + # a little duplication for now + if not query.session_ids or len(query.session_ids) != 1: + raise exceptions.ValidationError( + "Must specify exactly one session_id", + ) - if not filter.events and not filter.actions: - raise exceptions.ValidationError( - "Must specify at least one event or action filter", + if not query.events and not query.actions: + raise exceptions.ValidationError( + "Must specify at least one event or action filter", + ) + + distinct_id = str(cast(User, request.user).distinct_id) + modifiers = safely_read_modifiers_overrides(distinct_id, self.team) + results, _, timings = ( + posthog.session_recordings.queries.session_recording_list_from_query.ReplayFiltersEventsSubQuery( + query=query, team=self.team, hogql_query_modifiers=modifiers + ).get_event_ids_for_session() ) + else: + filter = SessionRecordingsFilter(request=request, team=self.team) - distinct_id = str(cast(User, request.user).distinct_id) - modifiers = safely_read_modifiers_overrides(distinct_id, self.team) - matching_events_query_response = ReplayFiltersEventsSubQuery( - filter=filter, team=self.team, hogql_query_modifiers=modifiers - ).get_event_ids_for_session() + if not filter.session_ids or len(filter.session_ids) != 1: + raise exceptions.ValidationError( + "Must specify exactly one session_id", + ) + + if not filter.events and not filter.actions: + raise exceptions.ValidationError( + "Must specify at least one event or action filter", + ) 
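Note: the matching_events change above keeps the legacy SessionRecordingsFilter path intact and adds an opt-in HogQL path: when as_query=true, the query string is parsed with query_as_params_to_dict and validated as a RecordingsQuery, with the same two validation rules applied in both branches. A rough client-side sketch of hitting the new path (host, project id, and auth header are placeholders; parameter shapes mirror the new test further down, assuming SESSION_RECORDINGS_FILTER_IDS resolves to the "session_ids" query parameter):

# Hypothetical usage sketch, not part of this diff.
import json
import requests

BASE_URL = "https://posthog.example.com"  # placeholder host
PROJECT_ID = 123  # placeholder project id
API_KEY = "phx_..."  # placeholder personal API key

params = {
    # exactly one session id is required, as a JSON-encoded list
    "session_ids": json.dumps(["<one session id>"]),
    # at least one event or action filter is required
    "events": json.dumps([{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]),
    # routes to the RecordingsQuery / HogQL code path; omit to stay on the legacy filter path
    "as_query": "true",
}
response = requests.get(
    f"{BASE_URL}/api/projects/{PROJECT_ID}/session_recordings/matching_events",
    params=params,
    headers={"Authorization": f"Bearer {API_KEY}"},
)
print(response.json())  # {"results": [...ids of matching events...]}

Either branch returns the same {"results": [...]} payload, so callers can switch over by flipping the flag.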
+ + distinct_id = str(cast(User, request.user).distinct_id) + modifiers = safely_read_modifiers_overrides(distinct_id, self.team) + results, _, timings = ReplayFiltersEventsSubQuery( + filter=filter, team=self.team, hogql_query_modifiers=modifiers + ).get_event_ids_for_session() - response = JsonResponse(data={"results": matching_events_query_response.results}) + response = JsonResponse(data={"results": results}) response.headers["Server-Timing"] = ", ".join( f"{key};dur={round(duration, ndigits=2)}" - for key, duration in _generate_timings( - matching_events_query_response.timings, ServerTimingsGathered() - ).items() + for key, duration in _generate_timings(timings, ServerTimingsGathered()).items() ) return response diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index f3c7d7edfa38d7..5a474ade819be0 100644 --- a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -1155,6 +1155,45 @@ def test_get_matching_events_for_unknown_session(self) -> None: assert response.status_code == status.HTTP_200_OK assert response.json() == {"results": []} + def test_get_matching_events_with_query(self) -> None: + base_time = (now() - relativedelta(days=1)).replace(microsecond=0) + + # the matching session + session_id = f"test_get_matching_events-1-{uuid.uuid4()}" + self.produce_replay_summary("user", session_id, base_time) + event_id = _create_event( + event="$pageview", + properties={"$session_id": session_id}, + team=self.team, + distinct_id=uuid.uuid4(), + ) + + # a non-matching session + non_matching_session_id = f"test_get_matching_events-2-{uuid.uuid4()}" + self.produce_replay_summary("user", non_matching_session_id, base_time) + _create_event( + event="$pageview", + properties={"$session_id": non_matching_session_id}, + team=self.team, + distinct_id=uuid.uuid4(), + ) + + flush_persons_and_events() + # data needs time to settle :'( + time.sleep(1) + + query_params = [ + f'{SESSION_RECORDINGS_FILTER_IDS}=["{session_id}"]', + 'events=[{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]', + ] + + response = self.client.get( + f"/api/projects/{self.team.id}/session_recordings/matching_events?{'&'.join(query_params)}&as_query=true" + ) + + assert response.status_code == status.HTTP_200_OK + assert response.json() == {"results": [event_id]} + def test_get_matching_events(self) -> None: base_time = (now() - relativedelta(days=1)).replace(microsecond=0) diff --git a/posthog/settings/web.py b/posthog/settings/web.py index de903f3eeb9172..49c68b0adb9780 100644 --- a/posthog/settings/web.py +++ b/posthog/settings/web.py @@ -36,13 +36,13 @@ DECIDE_SKIP_POSTGRES_FLAGS = get_from_env("DECIDE_SKIP_POSTGRES_FLAGS", False, type_cast=str_to_bool) +DECIDE_TOKENS_FOR_REMOTE_CONFIG = get_list(os.getenv("DECIDE_TOKENS_FOR_REMOTE_CONFIG", "")) + # Decide billing analytics DECIDE_BILLING_SAMPLING_RATE = get_from_env("DECIDE_BILLING_SAMPLING_RATE", 0.1, type_cast=float) DECIDE_BILLING_ANALYTICS_TOKEN = get_from_env("DECIDE_BILLING_ANALYTICS_TOKEN", None, type_cast=str, optional=True) -# temporary, used for safe rollout of defaulting people into anonymous events / process_persons: identified_only -DEFAULT_IDENTIFIED_ONLY_TEAM_ID_MIN: int = get_from_env("DEFAULT_IDENTIFIED_ONLY_TEAM_ID_MIN", 1000000, type_cast=int) # Decide regular request analytics # Takes 3 possible formats, all separated by commas: @@ -376,7 +376,15 @@ # Used only to display in the 
UI to inform users of allowlist options PUBLIC_EGRESS_IP_ADDRESSES = get_list(os.getenv("PUBLIC_EGRESS_IP_ADDRESSES", "")) -IMPERSONATION_TIMEOUT_SECONDS = get_from_env("IMPERSONATION_TIMEOUT_SECONDS", 15 * 60, type_cast=int) +# The total time allowed for an impersonated session +IMPERSONATION_TIMEOUT_SECONDS = get_from_env("IMPERSONATION_TIMEOUT_SECONDS", 60 * 60 * 2, type_cast=int) +# The time allowed for an impersonated session to be idle before it expires +IMPERSONATION_IDLE_TIMEOUT_SECONDS = get_from_env("IMPERSONATION_IDLE_TIMEOUT_SECONDS", 30 * 60, type_cast=int) +# Impersonation cookie last activity key +IMPERSONATION_COOKIE_LAST_ACTIVITY_KEY = get_from_env( + "IMPERSONATION_COOKIE_LAST_ACTIVITY_KEY", "impersonation_last_activity" +) + SESSION_COOKIE_CREATED_AT_KEY = get_from_env("SESSION_COOKIE_CREATED_AT_KEY", "session_created_at") PROJECT_SWITCHING_TOKEN_ALLOWLIST = get_list(os.getenv("PROJECT_SWITCHING_TOKEN_ALLOWLIST", "sTMFPsFhdP1Ssg")) @@ -390,3 +398,8 @@ # disables frontend side navigation hooks to make hot-reload work seamlessly DEV_DISABLE_NAVIGATION_HOOKS = get_from_env("DEV_DISABLE_NAVIGATION_HOOKS", False, type_cast=bool) + + +REMOTE_CONFIG_CDN_PURGE_ENDPOINT = get_from_env("REMOTE_CONFIG_CDN_PURGE_ENDPOINT", "") +REMOTE_CONFIG_CDN_PURGE_TOKEN = get_from_env("REMOTE_CONFIG_CDN_PURGE_TOKEN", "") +REMOTE_CONFIG_CDN_PURGE_DOMAINS = get_list(os.getenv("REMOTE_CONFIG_CDN_PURGE_DOMAINS", "")) diff --git a/posthog/tasks/alerts/checks.py b/posthog/tasks/alerts/checks.py index 4738ca45d3ade4..439305283b4cff 100644 --- a/posthog/tasks/alerts/checks.py +++ b/posthog/tasks/alerts/checks.py @@ -1,4 +1,3 @@ -import time import traceback from datetime import datetime, timedelta, UTC @@ -25,16 +24,15 @@ AlertState, ) from posthog.utils import get_from_dict_or_attr -from prometheus_client import Counter, Gauge from django.db.models import Q, F from collections import defaultdict from posthog.tasks.alerts.utils import ( AlertEvaluationResult, calculation_interval_to_order, + next_check_time, send_notifications_for_breaches, send_notifications_for_errors, WRAPPER_NODE_KINDS, - alert_calculation_interval_to_relativedelta, ) from posthog.tasks.alerts.trends import check_trends_alert from posthog.ph_client import ph_us_client @@ -54,26 +52,6 @@ def __init__(self, err: Exception): self.__traceback__ = err.__traceback__ -HOURLY_ALERTS_BACKLOG_GAUGE = Gauge( - "hourly_alerts_backlog", - "Number of hourly alerts that are not being checked in the last hour.", -) - -DAILY_ALERTS_BACKLOG_GAUGE = Gauge( - "daily_alerts_backlog", - "Number of daily alerts that are not being checked in the last 24 hours.", -) - -ALERT_CHECK_ERROR_COUNTER = Counter( - "alerts_check_failures", - "Number of alert check errors that don't notify the user", -) - -ALERT_COMPUTED_COUNTER = Counter( - "alerts_computed", - "Number of alerts we calculated", -) - ANIRUDH_DISTINCT_ID = "wcPbDRs08GtNzrNIXfzHvYAkwUaekW7UrAo4y3coznT" @@ -102,8 +80,6 @@ def alerts_backlog_task() -> None: ) ).count() - HOURLY_ALERTS_BACKLOG_GAUGE.set(hourly_alerts_breaching_sla) - now = datetime.now(UTC) daily_alerts_breaching_sla = AlertConfiguration.objects.filter( @@ -114,8 +90,6 @@ def alerts_backlog_task() -> None: ) ).count() - DAILY_ALERTS_BACKLOG_GAUGE.set(daily_alerts_breaching_sla) - with ph_us_client() as capture_ph_event: capture_ph_event( ANIRUDH_DISTINCT_ID, @@ -135,9 +109,6 @@ def alerts_backlog_task() -> None: }, ) - # sleeping 30s for prometheus to pick up the metrics sent during task - time.sleep(30) - @shared_task( 
ignore_result=True, @@ -266,7 +237,6 @@ def check_alert(alert_id: str, capture_ph_event: Callable = lambda *args, **kwar try: check_alert_and_notify_atomically(alert, capture_ph_event) except Exception as err: - ALERT_CHECK_ERROR_COUNTER.inc() user = cast(User, alert.created_by) capture_ph_event( @@ -309,9 +279,6 @@ def check_alert_and_notify_atomically(alert: AlertConfiguration, capture_ph_even so we can retry notification without re-computing insight. """ set_tag("alert_config_id", alert.id) - - ALERT_COMPUTED_COUNTER.inc() - user = cast(User, alert.created_by) # Event to count alert checks @@ -426,9 +393,7 @@ def add_alert_check( # IMPORTANT: update next_check_at according to interval # ensure we don't recheck alert until the next interval is due - alert.next_check_at = (alert.next_check_at or now) + alert_calculation_interval_to_relativedelta( - cast(AlertCalculationInterval, alert.calculation_interval) - ) + alert.next_check_at = next_check_time(alert) if notify: alert.last_notified_at = now diff --git a/posthog/tasks/alerts/test/test_alert_checks.py b/posthog/tasks/alerts/test/test_alert_checks.py index 02faef2dc9bd63..17cfcce2dca79b 100644 --- a/posthog/tasks/alerts/test/test_alert_checks.py +++ b/posthog/tasks/alerts/test/test_alert_checks.py @@ -100,7 +100,7 @@ def test_alert_is_triggered_for_values_above_higher_threshold( anomalies_descriptions = self.get_breach_description(mock_send_notifications_for_breaches, call_index=0) assert len(anomalies_descriptions) == 1 assert ( - "The insight value ($pageview) for previous interval (1) is more than upper threshold (0.0)" + "The insight value ($pageview) for current interval (1) is more than upper threshold (0.0)" in anomalies_descriptions[0] ) diff --git a/posthog/tasks/alerts/test/test_trends_absolute_alerts.py b/posthog/tasks/alerts/test/test_trends_absolute_alerts.py index af4376fbe13074..9a65a4297f9787 100644 --- a/posthog/tasks/alerts/test/test_trends_absolute_alerts.py +++ b/posthog/tasks/alerts/test/test_trends_absolute_alerts.py @@ -1,9 +1,11 @@ from typing import Optional, Any from unittest.mock import ANY, MagicMock, patch -import dateutil - from freezegun import freeze_time +import dateutil +import pytz +import datetime + from posthog.models.alert import AlertCheck from posthog.models.instance_setting import set_instance_setting from posthog.tasks.alerts.checks import check_alert @@ -24,6 +26,7 @@ ) from posthog.models import AlertConfiguration +# 8:55 AM FROZEN_TIME = dateutil.parser.parse("2024-06-02T08:55:00.000Z") @@ -40,7 +43,12 @@ def setUp(self) -> None: self.dashboard_api = DashboardAPI(self.client, self.team, self.assertEqual) def create_alert( - self, insight: dict, series_index: int, lower: Optional[int] = None, upper: Optional[int] = None + self, + insight: dict, + series_index: int, + lower: Optional[int] = None, + upper: Optional[int] = None, + calculation_interval: AlertCalculationInterval = AlertCalculationInterval.DAILY, ) -> dict: alert = self.client.post( f"/api/projects/{self.team.id}/alerts", @@ -53,14 +61,16 @@ def create_alert( "series_index": series_index, }, "condition": {"type": "absolute_value"}, - "calculation_interval": AlertCalculationInterval.DAILY, + "calculation_interval": calculation_interval, "threshold": {"configuration": {"type": "absolute", "bounds": {"lower": lower, "upper": upper}}}, }, ).json() return alert - def create_time_series_trend_insight(self, breakdown: Optional[BreakdownFilter] = None) -> dict[str, Any]: + def create_time_series_trend_insight( + self, breakdown: 
Optional[BreakdownFilter] = None, interval: IntervalType = IntervalType.WEEK + ) -> dict[str, Any]: query_dict = TrendsQuery( series=[ EventsNode( @@ -75,7 +85,7 @@ def create_time_series_trend_insight(self, breakdown: Optional[BreakdownFilter] ], breakdownFilter=breakdown, trendsFilter=TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH), - interval=IntervalType.WEEK, + interval=interval, dateRange=InsightDateRange(date_from="-8w"), ).model_dump() @@ -131,7 +141,11 @@ def test_alert_lower_threshold_breached(self, mock_send_breaches: MagicMock, moc assert updated_alert.state == AlertState.FIRING assert updated_alert.last_checked_at == FROZEN_TIME assert updated_alert.last_notified_at == FROZEN_TIME - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") assert alert_check.calculated_value == 0 @@ -165,7 +179,11 @@ def test_trend_high_threshold_breached(self, mock_send_breaches: MagicMock, mock updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") assert alert_check.calculated_value == 2 @@ -178,9 +196,11 @@ def test_trend_high_threshold_breached(self, mock_send_breaches: MagicMock, mock def test_trend_no_threshold_breached(self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock) -> None: insight = self.create_time_series_trend_insight() - alert = self.create_alert(insight, series_index=0, lower=0, upper=2) + alert = self.create_alert( + insight, series_index=0, lower=0, upper=2, calculation_interval=AlertCalculationInterval.MONTHLY + ) - with freeze_time(FROZEN_TIME - dateutil.relativedelta.relativedelta(days=1)): + with freeze_time(FROZEN_TIME): _create_event( team=self.team, event="signed_up", @@ -193,10 +213,49 @@ def test_trend_no_threshold_breached(self, mock_send_breaches: MagicMock, mock_s updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) assert updated_alert.state == AlertState.NOT_FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = datetime.datetime(2024, 7, 1, 4, 0, tzinfo=pytz.UTC) + # first day of next month at around 4 AM + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") - assert alert_check.calculated_value == 1 + assert alert_check.calculated_value == 0 + assert alert_check.state == AlertState.NOT_FIRING + assert alert_check.error is None + + def test_trend_no_threshold_breached_weekly( + self, mock_send_breaches: 
MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight() + alert = self.create_alert( + insight, series_index=0, lower=0, upper=2, calculation_interval=AlertCalculationInterval.WEEKLY + ) + + with freeze_time(FROZEN_TIME): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + check_alert(alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) + assert updated_alert.state == AlertState.NOT_FIRING + + next_check = ( + FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1, weekday=dateutil.relativedelta.MO(1)) + ).replace(hour=3, tzinfo=pytz.UTC) + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() + + alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") + assert alert_check.calculated_value == 0 assert alert_check.state == AlertState.NOT_FIRING assert alert_check.error is None @@ -231,7 +290,11 @@ def test_trend_breakdown_high_threshold_breached( updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") assert alert_check.calculated_value == 2 @@ -273,7 +336,11 @@ def test_trend_breakdown_low_threshold_breached( updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") assert alert_check.calculated_value == 1 @@ -315,7 +382,11 @@ def test_trend_breakdown_no_threshold_breached( updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) assert updated_alert.state == AlertState.NOT_FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") assert alert_check.calculated_value is None @@ -355,7 +426,11 @@ def test_aggregate_trend_high_threshold_breached( updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + 
dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") assert alert_check.calculated_value == 3 @@ -363,7 +438,7 @@ def test_aggregate_trend_high_threshold_breached( assert alert_check.error is None mock_send_breaches.assert_called_once_with( - ANY, ["The insight value (signed_up) for previous interval (3) is more than upper threshold (1.0)"] + ANY, ["The insight value (signed_up) for current interval (3) is more than upper threshold (1.0)"] ) def test_aggregate_trend_with_breakdown_high_threshold_breached( @@ -397,13 +472,187 @@ def test_aggregate_trend_with_breakdown_high_threshold_breached( updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() + + alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") + assert alert_check.calculated_value == 2 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + mock_send_breaches.assert_called_once_with( + ANY, ["The insight value (signed_up - Chrome) for current interval (2) is more than upper threshold (1.0)"] + ) + + def test_trend_current_interval_high_threshold_breached( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight() + alert = self.create_alert(insight, series_index=0, upper=1) + + # around 8 AM on same day as check + with freeze_time(FROZEN_TIME - dateutil.relativedelta.relativedelta(hours=1)): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="2", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + check_alert(alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) + assert updated_alert.state == AlertState.FIRING + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at is not None + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() + + alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") + assert alert_check.calculated_value == 2 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + mock_send_breaches.assert_called_once_with( + ANY, ["The insight value (signed_up) for current week (2) is more than upper threshold (1.0)"] + ) + + def test_trend_current_interval_fallback_to_previous_high_threshold_breached( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight(interval=IntervalType.DAY) + alert = self.create_alert(insight, series_index=0, upper=1) + + # current day doesn't breach + 
with freeze_time(FROZEN_TIME - dateutil.relativedelta.relativedelta(hours=1)):
+ _create_event(
+ team=self.team,
+ event="signed_up",
+ distinct_id="1",
+ properties={"$browser": "Chrome"},
+ )
+ flush_persons_and_events()
+
+ # prev day breaches
+ with freeze_time(FROZEN_TIME - dateutil.relativedelta.relativedelta(hours=26)):
+ _create_event(
+ team=self.team,
+ event="signed_up",
+ distinct_id="1",
+ properties={"$browser": "Chrome"},
+ )
+ _create_event(
+ team=self.team,
+ event="signed_up",
+ distinct_id="2",
+ properties={"$browser": "Chrome"},
+ )
+ flush_persons_and_events()
+
+ check_alert(alert["id"])
+
+ updated_alert = AlertConfiguration.objects.get(pk=alert["id"])
+ assert updated_alert.state == AlertState.FIRING
+
+ next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+ assert updated_alert.next_check_at is not None
+ assert updated_alert.next_check_at.hour == next_check.hour
+ assert updated_alert.next_check_at.date() == next_check.date()
alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at")
assert alert_check.calculated_value == 2
assert alert_check.state == AlertState.FIRING
assert alert_check.error is None
+ # should be 'previous day', as the current-day check falls back to the previous interval
+ mock_send_breaches.assert_called_once_with(
+ ANY, ["The insight value (signed_up) for previous day (2) is more than upper threshold (1.0)"]
+ )
+
+ def test_trend_current_interval_no_threshold_breached(
+ self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock
+ ) -> None:
+ insight = self.create_time_series_trend_insight(interval=IntervalType.DAY)
+ alert = self.create_alert(insight, series_index=0, upper=1)
+
+ # day before yesterday
+ with freeze_time(
+ FROZEN_TIME - dateutil.relativedelta.relativedelta(days=2) - dateutil.relativedelta.relativedelta(hours=2)
+ ):
+ _create_event(
+ team=self.team,
+ event="signed_up",
+ distinct_id="1",
+ properties={"$browser": "Chrome"},
+ )
+ _create_event(
+ team=self.team,
+ event="signed_up",
+ distinct_id="2",
+ properties={"$browser": "Chrome"},
+ )
+ flush_persons_and_events()
+
+ check_alert(alert["id"])
+
+ updated_alert = AlertConfiguration.objects.get(pk=alert["id"])
+ assert updated_alert.state == AlertState.NOT_FIRING
+
+ next_check = (FROZEN_TIME +
dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+ assert updated_alert.next_check_at is not None
+ assert updated_alert.next_check_at.hour == next_check.hour
+ assert updated_alert.next_check_at.date() == next_check.date()
+
+ alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at")
+ # will be 0 even though for the current week it's 1:
+ # because this is an absolute alert with a lower threshold,
+ # the current interval isn't checked; the check goes directly to the previous week
+ assert alert_check.calculated_value == 0
+ assert alert_check.state == AlertState.FIRING
+ assert alert_check.error is None
+ mock_send_breaches.assert_called_once_with(
- ANY, ["The insight value (signed_up - Chrome) for previous interval (2) is more than upper threshold (1.0)"]
+ ANY, ["The insight value (signed_up) for previous week (0) is less than lower threshold (2.0)"]
)
diff --git a/posthog/tasks/alerts/test/test_trends_relative_alerts.py b/posthog/tasks/alerts/test/test_trends_relative_alerts.py
index de530d3817fd76..b213ea2c47e54f 100644
--- a/posthog/tasks/alerts/test/test_trends_relative_alerts.py
+++ b/posthog/tasks/alerts/test/test_trends_relative_alerts.py
@@ -1,10 +1,10 @@
from typing import Optional, Any
from unittest.mock import ANY, call, MagicMock, patch
-import dateutil
-
+from freezegun import freeze_time
+import dateutil
import dateutil.relativedelta
-from freezegun import freeze_time
+import pytz
from posthog.models.alert import AlertCheck
from posthog.models.instance_setting import set_instance_setting
@@ -123,7 +123,11 @@ def test_alert_properties(self, mock_send_breaches: MagicMock, mock_send_errors:
assert updated_alert.state == AlertState.FIRING
assert updated_alert.last_checked_at == FROZEN_TIME
assert updated_alert.last_notified_at == FROZEN_TIME
- assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+ next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+ assert updated_alert.next_check_at
+ assert updated_alert.next_check_at.hour == next_check.hour
+ assert updated_alert.next_check_at.date() == next_check.date()
alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at")
assert alert_check.calculated_value == 0
@@ -168,7 +172,11 @@ def test_relative_increase_absolute_upper_threshold_breached(
updated_alert = AlertConfiguration.objects.get(pk=alert["id"])
assert updated_alert.state == AlertState.FIRING
- assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+ next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+ assert updated_alert.next_check_at
+ assert updated_alert.next_check_at.hour == next_check.hour
+ assert updated_alert.next_check_at.date() == next_check.date()
alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at")
@@ -199,7 +207,7 @@ def test_relative_increase_upper_threshold_breached(
insight, series_index=0,
condition_type=AlertConditionType.RELATIVE_INCREASE,
- threshold_type=InsightThresholdType.ABSOLUTE,
+ threshold_type=InsightThresholdType.PERCENTAGE,
upper=0.2,
)
@@ -247,7 +255,11 @@ def test_relative_increase_upper_threshold_breached(
updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
assert updated_alert.state == AlertState.FIRING
- assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
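Note: all of these assertion updates encode the same scheduling change: next_check_at is no longer exactly FROZEN_TIME plus one interval, but is bucketed into a quiet hour, which is presumably why the tests compare only .hour and .date() instead of full equality (the real next_check_time imported in checks.py likely adds minute-level jitter). A minimal sketch consistent with the values these tests assert (daily at ~1 AM UTC the next day, weekly at ~3 AM UTC the next Monday, monthly at ~4 AM UTC on the first of the next month; hours and behavior are inferred from the tests, not taken from the implementation):

# Hypothetical reconstruction from the test expectations in this diff.
from datetime import datetime, UTC
from dateutil.relativedelta import relativedelta, MO

def next_check_time_sketch(calculation_interval: str, now: datetime) -> datetime:
    if calculation_interval == "daily":
        # next day, in the 1 AM UTC hour
        return (now + relativedelta(days=1)).replace(hour=1, minute=0, second=0, microsecond=0, tzinfo=UTC)
    if calculation_interval == "weekly":
        # next Monday, in the 3 AM UTC hour
        return (now + relativedelta(days=1, weekday=MO(1))).replace(hour=3, minute=0, second=0, microsecond=0, tzinfo=UTC)
    if calculation_interval == "monthly":
        # first day of the next month, in the 4 AM UTC hour
        return (now + relativedelta(months=1)).replace(day=1, hour=4, minute=0, second=0, microsecond=0, tzinfo=UTC)
    raise ValueError(f"unsupported interval: {calculation_interval}")

For example, with FROZEN_TIME = 2024-06-02 08:55 UTC (a Sunday), the daily case lands on 2024-06-03 at 01:00, the weekly case on Monday 2024-06-03 at 03:00, and the monthly case on 2024-07-01 at 04:00, matching the dates and hours asserted in these tests.
+
+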
next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") @@ -259,7 +271,11 @@ def test_relative_increase_upper_threshold_breached( updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at") @@ -329,7 +345,11 @@ def test_relative_increase_lower_threshold_breached_1( updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") @@ -346,7 +366,11 @@ def test_relative_increase_lower_threshold_breached_1( updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at") @@ -421,7 +445,11 @@ def test_relative_increase_lower_threshold_breached_2( updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at + assert updated_alert.next_check_at.hour == next_check.hour + assert updated_alert.next_check_at.date() == next_check.date() alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") @@ -433,7 +461,11 @@ def test_relative_increase_lower_threshold_breached_2( updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC) + assert updated_alert.next_check_at + assert updated_alert.next_check_at.hour 
== next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
@@ -508,7 +540,11 @@ def test_relative_decrease_upper_threshold_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
@@ -524,7 +560,11 @@ def test_relative_decrease_upper_threshold_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
@@ -598,7 +638,11 @@ def test_relative_decrease_lower_threshold_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
@@ -614,7 +658,11 @@ def test_relative_decrease_lower_threshold_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
@@ -694,7 +742,11 @@ def test_relative_increase_no_threshold_breached(
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.NOT_FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
         assert alert_check.calculated_value == 2
@@ -705,7 +757,11 @@ def test_relative_increase_no_threshold_breached(
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
         assert updated_alert.state == AlertState.NOT_FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
         assert alert_check.calculated_value == 2
@@ -779,7 +835,11 @@ def test_relative_decrease_no_threshold_breached(
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.NOT_FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
         assert alert_check.calculated_value == 2
@@ -790,7 +850,11 @@ def test_relative_decrease_no_threshold_breached(
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
         assert updated_alert.state == AlertState.NOT_FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
         assert alert_check.calculated_value == (2 / 3)
@@ -878,7 +942,11 @@ def test_breakdown_relative_increase_upper_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
@@ -890,7 +958,11 @@ def test_breakdown_relative_increase_upper_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
@@ -996,7 +1068,11 @@ def test_breakdown_relative_increase_lower_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
@@ -1008,7 +1084,11 @@ def test_breakdown_relative_increase_lower_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
@@ -1114,7 +1194,11 @@ def test_breakdown_relative_decrease_lower_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
@@ -1126,7 +1210,11 @@ def test_breakdown_relative_decrease_lower_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
@@ -1233,7 +1321,11 @@ def test_breakdown_relative_decrease_upper_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
@@ -1245,7 +1337,11 @@ def test_breakdown_relative_decrease_upper_breached(
 
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
         assert updated_alert.state == AlertState.FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
@@ -1340,7 +1436,11 @@ def test_breakdown_relative_decrease_no_breaches(
 
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.NOT_FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
@@ -1352,7 +1452,11 @@ def test_breakdown_relative_decrease_no_breaches(
 
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
         assert updated_alert.state == AlertState.NOT_FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
@@ -1432,7 +1536,11 @@ def test_breakdown_relative_increase_no_breaches(
 
         updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
         assert updated_alert.state == AlertState.NOT_FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
@@ -1444,7 +1552,11 @@ def test_breakdown_relative_increase_no_breaches(
 
         updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
        assert updated_alert.state == AlertState.NOT_FIRING
-        assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
 
         alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
@@ -1453,3 +1565,409 @@ def test_breakdown_relative_increase_no_breaches(
         assert alert_check.error is None
 
         mock_send_breaches.assert_not_called()
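A note on the assertion pattern repeated in the hunks above: the exact-equality check against FROZEN_TIME + 1 day is gone because next_check_time() (see posthog/tasks/alerts/utils.py further down in this patch) now pins daily checks to around 1 AM rather than exactly 24 hours later, so the tests compare only the hour and the date. A minimal restatement of the new shape (the FROZEN_TIME value here is illustrative, not the suite's real constant):

    from datetime import datetime

    import dateutil.relativedelta
    import pytz

    FROZEN_TIME = datetime(2024, 6, 4, 10, 0, tzinfo=pytz.UTC)  # a Tuesday; illustrative only

    # Daily alerts are now scheduled for ~1 AM the next day, so the tests compare
    # hour and date instead of requiring exact equality with FROZEN_TIME + 1 day.
    next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
    assert next_check == datetime(2024, 6, 5, 1, 0, tzinfo=pytz.UTC)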
+
+    def test_current_interval_relative_increase_upper_threshold_breached(
+        self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock
+    ) -> None:
+        insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK)
+
+        # alert if sign ups increase by more than 1
+        absolute_alert = self.create_alert(
+            insight,
+            series_index=0,
+            condition_type=AlertConditionType.RELATIVE_INCREASE,
+            threshold_type=InsightThresholdType.ABSOLUTE,
+            upper=1,
+        )
+
+        # alert if sign ups increase by more than 20%
+        percentage_alert = self.create_alert(
+            insight,
+            series_index=0,
+            condition_type=AlertConditionType.RELATIVE_INCREASE,
+            threshold_type=InsightThresholdType.PERCENTAGE,
+            upper=0.2,
+        )
+
+        # FROZEN_TIME is on Tue, insight has weekly interval
+        # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June)
+
+        # set current interval to have 3 events
+        with freeze_time(FROZEN_TIME):
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="2",
+                properties={"$browser": "Chrome"},
+            )
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="3",
+                properties={"$browser": "Chrome"},
+            )
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="4",
+                properties={"$browser": "Chrome"},
+            )
+            flush_persons_and_events()
+
+        # set previous interval (last week) to have 1 event
+        last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1)
+
+        with freeze_time(last_tue):
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="1",
+                properties={"$browser": "Chrome"},
+            )
+            flush_persons_and_events()
+
+        # set previous to previous interval (last to last week) to also have 1 event
+        # so the alert shouldn't fire for the previous week
+        last_to_last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=2)
+
+        with freeze_time(last_to_last_tue):
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="1",
+                properties={"$browser": "Chrome"},
+            )
+            flush_persons_and_events()
+
+        # alert should fire as we had an *increase* in events of (2 or 200%) week over week
+        check_alert(absolute_alert["id"])
+
+        updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
+        assert updated_alert.state == AlertState.FIRING
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
+
+        alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
+
+        assert alert_check.calculated_value == 2
+        assert alert_check.state == AlertState.FIRING
+        assert alert_check.error is None
+
+        mock_send_breaches.assert_called_once_with(
+            ANY, ["The insight value (signed_up) for current week (2) increased more than upper threshold (1.0)"]
+        )
+
+        check_alert(percentage_alert["id"])
+
+        updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
+        assert updated_alert.state == AlertState.FIRING
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
+
+        alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
+
+        assert alert_check.calculated_value == 2
+        assert alert_check.state == AlertState.FIRING
+        assert alert_check.error is None
+
+        mock_send_breaches.assert_called_with(
+            ANY,
+            ["The insight value (signed_up) for current week (200.00%) increased more than upper threshold (20.00%)"],
+        )
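Before the fallback variant below, it is worth pinning down the arithmetic behind the 200.00% asserted above. As plain Python (variable names invented here; only the formula matches the alert logic):

    # Week-over-week relative increase, as the percentage threshold sees it:
    previous_week, current_week = 1, 3  # event counts from the test setup above
    increase = (current_week - previous_week) / previous_week
    assert increase == 2.0  # rendered as 200.00%, which breaches the 0.2 (20.00%) upper bound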
+
+    def test_current_interval_relative_increase_fallback_upper_threshold_breached(
+        self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock
+    ) -> None:
+        insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK)
+
+        # alert if sign ups increase by more than 1
+        absolute_alert = self.create_alert(
+            insight,
+            series_index=0,
+            condition_type=AlertConditionType.RELATIVE_INCREASE,
+            threshold_type=InsightThresholdType.ABSOLUTE,
+            upper=1,
+        )
+
+        # alert if sign ups increase by more than 20%
+        percentage_alert = self.create_alert(
+            insight,
+            series_index=0,
+            condition_type=AlertConditionType.RELATIVE_INCREASE,
+            threshold_type=InsightThresholdType.PERCENTAGE,
+            upper=0.2,
+        )
+
+        # FROZEN_TIME is on Tue, insight has weekly interval
+        # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June)
+
+        # set current interval to have 1 event
+        with freeze_time(FROZEN_TIME):
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="2",
+                properties={"$browser": "Chrome"},
+            )
+            flush_persons_and_events()
+
+        # set previous interval (last week) to have 3 events
+        last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1)
+
+        with freeze_time(last_tue):
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="1",
+                properties={"$browser": "Chrome"},
+            )
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="3",
+                properties={"$browser": "Chrome"},
+            )
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="4",
+                properties={"$browser": "Chrome"},
+            )
+            flush_persons_and_events()
+
+        # set previous to previous interval (last to last week) to also have 1 event
+        # so the alert should fire for the previous week (3 vs 1 is an increase of 2)
+        last_to_last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=2)
+
+        with freeze_time(last_to_last_tue):
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="1",
+                properties={"$browser": "Chrome"},
+            )
+            flush_persons_and_events()
+
+        # alert should fire as we had an *increase* in events of (2 or 200%) week over week
+        check_alert(absolute_alert["id"])
+
+        updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
+        assert updated_alert.state == AlertState.FIRING
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
+
+        alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
+
+        assert alert_check.calculated_value == 2
+        assert alert_check.state == AlertState.FIRING
+        assert alert_check.error is None
+
+        # should be 'previous' week as we haven't breached for the current week
+        # so the logic falls back to the previous week
+        mock_send_breaches.assert_called_once_with(
+            ANY, ["The insight value (signed_up) for previous week (2) increased more than upper threshold (1.0)"]
+        )
+
+        check_alert(percentage_alert["id"])
+
+        updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
+        assert updated_alert.state == AlertState.FIRING
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
+
+        alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
+
+        assert alert_check.calculated_value == 2
+        assert alert_check.state == AlertState.FIRING
+        assert alert_check.error is None
+
+        mock_send_breaches.assert_called_with(
+            ANY,
+            ["The insight value (signed_up) for previous week (200.00%) increased more than upper threshold (20.00%)"],
+        )
+
+    def test_relative_increase_when_previous_value_is_0(
+        self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock
+    ) -> None:
+        insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK)
+
+        # alert if sign ups increase by more than 1
+        absolute_alert = self.create_alert(
+            insight,
+            series_index=0,
+            condition_type=AlertConditionType.RELATIVE_INCREASE,
+            threshold_type=InsightThresholdType.ABSOLUTE,
+            upper=1,
+        )
+
+        # alert if sign ups increase by more than 20%
+        percentage_alert = self.create_alert(
+            insight,
+            series_index=0,
+            condition_type=AlertConditionType.RELATIVE_INCREASE,
+            threshold_type=InsightThresholdType.PERCENTAGE,
+            upper=0.2,
+        )
+
+        # FROZEN_TIME is on Tue, insight has weekly interval
+        # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June)
+
+        # set previous interval (last week) to have 2 events
+        last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1)
+
+        with freeze_time(last_tue):
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="1",
+                properties={"$browser": "Chrome"},
+            )
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="3",
+                properties={"$browser": "Chrome"},
+            )
+            flush_persons_and_events()
+
+        # set previous to previous interval (last to last week) to have 0 events
+
+        # alert should fire as we had an *increase* in events of (infinity) week over week
+        check_alert(absolute_alert["id"])
+
+        updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
+        assert updated_alert.state == AlertState.FIRING
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
+
+        alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
+
+        assert alert_check.calculated_value == 2
+        assert alert_check.state == AlertState.FIRING
+        assert alert_check.error is None
+
+        # should be 'previous' week as we haven't breached for the current week
+        # so the logic falls back to the previous week
+        mock_send_breaches.assert_called_once_with(
+            ANY, ["The insight value (signed_up) for previous week (2) increased more than upper threshold (1.0)"]
+        )
+
+        check_alert(percentage_alert["id"])
+
+        updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
+        assert updated_alert.state == AlertState.FIRING
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
+
+        alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
+
+        assert alert_check.calculated_value == float("inf")
+        assert alert_check.state == AlertState.FIRING
+        assert alert_check.error is None
+
+        mock_send_breaches.assert_called_with(
+            ANY,
+            ["The insight value (signed_up) for previous week (inf%) increased more than upper threshold (20.00%)"],
+        )
+
+    def test_relative_decrease_when_previous_value_is_0(
+        self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock
+    ) -> None:
+        insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK)
+
+        # alert if sign ups decrease by more than 1
+        absolute_alert = self.create_alert(
+            insight,
+            series_index=0,
+            condition_type=AlertConditionType.RELATIVE_DECREASE,
+            threshold_type=InsightThresholdType.ABSOLUTE,
+            upper=1,
+        )
+
+        # alert if sign ups decrease by more than 20%
+        percentage_alert = self.create_alert(
+            insight,
+            series_index=0,
+            condition_type=AlertConditionType.RELATIVE_DECREASE,
+            threshold_type=InsightThresholdType.PERCENTAGE,
+            upper=0.2,
+        )
+
+        # FROZEN_TIME is on Tue, insight has weekly interval
+        # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June)
+
+        # set previous interval (last week) to have 2 events
+        last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1)
+
+        with freeze_time(last_tue):
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="1",
+                properties={"$browser": "Chrome"},
+            )
+            _create_event(
+                team=self.team,
+                event="signed_up",
+                distinct_id="3",
+                properties={"$browser": "Chrome"},
+            )
+            flush_persons_and_events()
+
+        # set previous to previous interval (last to last week) to have 0 events
+
+        # alert should fire as we had a *decrease* in events of (infinity) week over week
+        check_alert(absolute_alert["id"])
+
+        updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"])
+        assert updated_alert.state == AlertState.NOT_FIRING
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
+
+        alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at")
+
+        assert alert_check.calculated_value == -2
+        assert alert_check.state == AlertState.NOT_FIRING
+        assert alert_check.error is None
+
+        check_alert(percentage_alert["id"])
+
+        updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"])
+        assert updated_alert.state == AlertState.FIRING
+
+        next_check = (FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1)).replace(hour=1, tzinfo=pytz.UTC)
+        assert updated_alert.next_check_at
+        assert updated_alert.next_check_at.hour == next_check.hour
+        assert updated_alert.next_check_at.date() == next_check.date()
+
+        alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at")
+
+        assert alert_check.calculated_value == float("inf")
+        assert alert_check.state == AlertState.FIRING
+        assert alert_check.error is None
+
+        mock_send_breaches.assert_called_with(
+            ANY,
+            ["The insight value (signed_up) for previous week (inf%) decreased more than upper threshold (20.00%)"],
+        )
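The two previous-value-is-0 tests above pin down a deliberate policy in the new trends code: a change from zero is reported as float("inf") instead of raising ZeroDivisionError. A condensed sketch of the guard (mirroring the branches added in trends.py below):

    def relative_change(previous: float, current: float) -> float:
        # Same zero guards as the branches added below: 0 -> 0 counts as no change,
        # 0 -> anything else counts as an infinite change instead of ZeroDivisionError.
        if previous == 0 and current == 0:
            return 0.0
        if previous == 0:
            return float("inf")
        return (current - previous) / previous

    assert relative_change(0, 2) == float("inf")  # why the tests expect calculated_value == float("inf")
    assert relative_change(2, 2) == 0.0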
diff --git a/posthog/tasks/alerts/trends.py b/posthog/tasks/alerts/trends.py
index 9a3bd54c8bccd2..0175d414498d40 100644
--- a/posthog/tasks/alerts/trends.py
+++ b/posthog/tasks/alerts/trends.py
@@ -43,6 +43,17 @@ class TrendResult(TypedDict):
 
 
 def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: TrendsQuery) -> AlertEvaluationResult:
+    """
+    Calculates the insight value for the needed time periods and compares it with the threshold.
+
+    Generally we check the insight value for the previous interval (the day/week/... grouping set on the trend insight) and compare it with the threshold, or with the value for the interval before that.
+    This is done because we need the current interval to complete before comparing against the threshold.
+    (e.g. when checking value < X, we need to wait, as more events will come in before the interval finishes)
+
+    But in some cases (when check_current_interval = True), like value > X or value increase > X, we can check the value for the current interval and alert right away if the threshold is breached.
+    So we check the current interval value first and alert if the threshold is breached; otherwise we fall back and process the previous interval.
+    """
+
     if "type" in alert.config and alert.config["type"] == "TrendsAlertConfig":
         config = TrendsAlertConfig.model_validate(alert.config)
     else:
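Before the per-branch hunks, here is the control flow the docstring describes, reduced to its skeleton (a sketch with a hypothetical helper; the real code threads thresholds and series labels through _breach_messages):

    from typing import Optional

    def two_phase_check(current: float, previous: float, upper: Optional[float]) -> tuple[float, list[str]]:
        # Phase 1: an upper bound can be judged mid-interval (the value can still rise but not sink).
        if upper is not None and current > upper:
            return current, [f"current interval ({current}) breached upper threshold ({upper})"]
        # Phase 2: otherwise judge the completed previous interval.
        if upper is not None and previous > upper:
            return previous, [f"previous interval ({previous}) breached upper threshold ({upper})"]
        return previous, []

    assert two_phase_check(5, 1, upper=3)[1]  # fires early, on the current interval
    assert two_phase_check(1, 5, upper=3)[1]  # fires via the fallback, on the previous interval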
@@ -51,14 +62,14 @@ def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: Trend
     condition = AlertCondition.model_validate(alert.condition)
     threshold = InsightThreshold.model_validate(alert.threshold.configuration) if alert.threshold else None
 
-    if not threshold:
+    if not threshold or not threshold.bounds:
         return AlertEvaluationResult(value=0, breaches=[])
 
     has_breakdown = query.breakdownFilter and (
         (query.breakdownFilter.breakdown and query.breakdownFilter.breakdown_type) or query.breakdownFilter.breakdowns
     )
-
     is_non_time_series = _is_non_time_series_trend(query)
+    check_current_interval = False
 
     match condition.type:
         case AlertConditionType.ABSOLUTE_VALUE:
@@ -87,14 +98,32 @@ def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: Trend
 
             interval = query.interval if not is_non_time_series else None
 
+            if threshold.bounds.upper is not None:
+                # checking for value > X so we can also check the current interval value
+                check_current_interval = True
+
             if has_breakdown:
                 # for breakdowns, we need to check all values in calculation_result.result
                 breakdown_results = calculation_result.result
 
                 for breakdown_result in breakdown_results:
-                    # pick previous interval value
+                    if check_current_interval:
+                        current_interval_value = _pick_interval_value_from_trend_result(query, breakdown_result, 0)
+                        breaches = _breach_messages(
+                            bounds=threshold.bounds,
+                            calculated_value=current_interval_value,
+                            threshold_type=threshold.type,
+                            condition_type=condition.type,
+                            interval_type=interval,
+                            series=breakdown_result["label"],
+                            is_current_interval=True,
+                        )
+                        if breaches:
+                            return AlertEvaluationResult(value=current_interval_value, breaches=breaches)
+
+                    # fall back to check the previous interval
                     prev_interval_value = _pick_interval_value_from_trend_result(query, breakdown_result, -1)
-                    breaches = _validate_bounds(
+                    breaches = _breach_messages(
                         threshold.bounds,
                         prev_interval_value,
                         threshold.type,
@@ -102,7 +131,6 @@ def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: Trend
                         interval,
                         breakdown_result["label"],
                     )
-
                     if breaches:
                         # found one breakdown value that breached the threshold
                         return AlertEvaluationResult(value=prev_interval_value, breaches=breaches)
@@ -113,8 +141,24 @@ def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: Trend
             # for non breakdowns, we pick the series (config.series_index) from calculation_result.result
             selected_series_result = _pick_series_result(config, calculation_result)
 
+            if check_current_interval:
+                # pick the current interval value
+                current_interval_value = _pick_interval_value_from_trend_result(query, selected_series_result, 0)
+                breaches = _breach_messages(
+                    threshold.bounds,
+                    current_interval_value,
+                    threshold.type,
+                    condition.type,
+                    interval,
+                    selected_series_result["label"],
+                    is_current_interval=True,
+                )
+                if breaches:
+                    return AlertEvaluationResult(value=current_interval_value, breaches=breaches)
+
+            # fall back to check the previous interval
             prev_interval_value = _pick_interval_value_from_trend_result(query, selected_series_result, -1)
-            breaches = _validate_bounds(
+            breaches = _breach_messages(
                 threshold.bounds,
                 prev_interval_value,
                 threshold.type,
@@ -133,7 +177,6 @@ def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: Trend
             # so we need to compute the trend values for last 3 intervals
             # and then compare the previous interval with value for the interval before previous
             filters_overrides = _date_range_override_for_intervals(query, last_x_intervals=3)
-
             calculation_result = calculate_for_query_based_insight(
                 insight,
                 team=alert.team,
@@ -142,11 +185,11 @@ def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: Trend
                 filters_override=filters_overrides,
             )
 
-            results_to_evaluate = []
+            results_to_evaluate: list[TrendResult] = []
 
             if has_breakdown:
                 # for breakdowns, we need to check all values in calculation_result.result
-                breakdown_results = calculation_result.result
+                breakdown_results = cast(list[TrendResult], calculation_result.result)
                 results_to_evaluate.extend(breakdown_results)
             else:
                 # for non breakdowns, we pick the series (config.series_index) from calculation_result.result
@@ -161,40 +204,74 @@ def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: Trend
             increase = None
             breaches = []
 
+            if threshold.bounds.upper is not None:
+                # checking for value increase > X so we can also check the current interval value,
+                # as we can alert right away if current interval value - previous interval value > upper threshold
+                check_current_interval = True
+
             for result in results_to_evaluate:
+                current_interval_value = _pick_interval_value_from_trend_result(query, result, 0)
                 prev_interval_value = _pick_interval_value_from_trend_result(query, result, -1)
                 prev_prev_interval_value = _pick_interval_value_from_trend_result(query, result, -2)
 
-                if threshold.type == InsightThresholdType.ABSOLUTE:
-                    increase = prev_interval_value - prev_prev_interval_value
-                    breaches = _validate_bounds(
+                if check_current_interval:
+                    if threshold.type == InsightThresholdType.ABSOLUTE:
+                        increase = current_interval_value - prev_interval_value
+                    elif threshold.type == InsightThresholdType.PERCENTAGE:
+                        if prev_interval_value == 0 and current_interval_value == 0:
+                            increase = 0
+                        elif prev_interval_value == 0:
+                            increase = float("inf")
+                        else:
+                            increase = (current_interval_value - prev_interval_value) / prev_interval_value
+                    else:
+                        raise ValueError(
+                            f"Neither relative nor absolute threshold configured for alert condition RELATIVE_INCREASE"
+                        )
+
+                    breaches = _breach_messages(
                         threshold.bounds,
                         increase,
                         threshold.type,
                         condition.type,
                         query.interval,
                         result["label"],
+                        is_current_interval=True,
                     )
+
+                    if breaches:
+                        # found a breach for one of the results so alert
+                        return AlertEvaluationResult(value=increase, breaches=breaches)
+
+                # fall back to check the previous intervals
+                if threshold.type == InsightThresholdType.ABSOLUTE:
+                    increase = prev_interval_value - prev_prev_interval_value
                 elif threshold.type == InsightThresholdType.PERCENTAGE:
-                    increase = (prev_interval_value - prev_prev_interval_value) / prev_prev_interval_value
-                    breaches = _validate_bounds(
-                        threshold.bounds,
-                        increase,
-                        threshold.type,
-                        condition.type,
-                        query.interval,
-                        result["label"],
-                    )
+                    if prev_prev_interval_value == 0 and prev_interval_value == 0:
+                        increase = 0
+                    elif prev_prev_interval_value == 0:
+                        increase = float("inf")
+                    else:
+                        increase = (prev_interval_value - prev_prev_interval_value) / prev_prev_interval_value
                 else:
                     raise ValueError(
                         f"Neither relative nor absolute threshold configured for alert condition RELATIVE_INCREASE"
                     )
 
+                breaches = _breach_messages(
+                    threshold.bounds,
+                    increase,
+                    threshold.type,
+                    condition.type,
+                    query.interval,
+                    result["label"],
+                )
+
                 if breaches:
                     # found a breach for one of the results so alert
                    return AlertEvaluationResult(value=increase, breaches=breaches)
 
-            return AlertEvaluationResult(value=(increase if not has_breakdown else None), breaches=breaches)
+            return AlertEvaluationResult(value=(increase if not has_breakdown else None), breaches=[])
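Note that the RELATIVE_DECREASE case below deliberately has no check_current_interval branch: an increase observed mid-interval can only grow as more events arrive, while an apparent decrease may still be erased before the interval closes. Illustratively (invented numbers):

    previous_week, observed_so_far = 5, 1  # mid-week: the current interval is incomplete

    apparent_decrease = previous_week - observed_so_far  # 4 now, but may shrink to 0 by week's end
    apparent_increase = observed_so_far - previous_week  # can only move upward as events arrive

    # So "increase > X" may fire early on the current interval,
    # while "decrease > X" must wait for the interval to finish.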
@@ -205,7 +282,6 @@ def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: Trend
             # so we need to compute the trend values for last 3 intervals
             # and then compare the previous interval with value for the interval before previous
             filters_overrides = _date_range_override_for_intervals(query, last_x_intervals=3)
-
             calculation_result = calculate_for_query_based_insight(
                 insight,
                 team=alert.team,
@@ -238,34 +314,32 @@ def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: Trend
 
                 if threshold.type == InsightThresholdType.ABSOLUTE:
                     decrease = prev_prev_interval_value - prev_interval_value
-                    breaches = _validate_bounds(
-                        threshold.bounds,
-                        decrease,
-                        threshold.type,
-                        condition.type,
-                        query.interval,
-                        result["label"],
-                    )
                 elif threshold.type == InsightThresholdType.PERCENTAGE:
-                    decrease = (prev_prev_interval_value - prev_interval_value) / prev_prev_interval_value
-                    breaches = _validate_bounds(
-                        threshold.bounds,
-                        decrease,
-                        threshold.type,
-                        condition.type,
-                        query.interval,
-                        result["label"],
-                    )
+                    if prev_prev_interval_value == 0 and prev_interval_value == 0:
+                        decrease = 0
+                    elif prev_prev_interval_value == 0:
+                        decrease = float("inf")
+                    else:
+                        decrease = (prev_prev_interval_value - prev_interval_value) / prev_prev_interval_value
                 else:
                     raise ValueError(
                         f"Neither relative nor absolute threshold configured for alert condition RELATIVE_INCREASE"
                     )
 
+                breaches = _breach_messages(
+                    threshold.bounds,
+                    decrease,
+                    threshold.type,
+                    condition.type,
+                    query.interval,
+                    result["label"],
+                )
+
                 if breaches:
                     # found a breach for one of the results so alert
                     return AlertEvaluationResult(value=decrease, breaches=breaches)
 
-            return AlertEvaluationResult(value=(decrease if not has_breakdown else None), breaches=breaches)
+            return AlertEvaluationResult(value=(decrease if not has_breakdown else None), breaches=[])
 
         case _:
             raise NotImplementedError(f"Unsupported alert condition type: {condition.type}")
@@ -318,19 +392,16 @@ def _pick_interval_value_from_trend_result(query: TrendsQuery, result: TrendResu
 
     return data[index_from_back]
 
 
-def _validate_bounds(
-    bounds: InsightsThresholdBounds | None,
+def _breach_messages(
+    bounds: InsightsThresholdBounds,
     calculated_value: float,
     threshold_type: InsightThresholdType,
     condition_type: AlertConditionType,
     interval_type: IntervalType | None,
     series: str,
+    is_current_interval: bool = False,
 ) -> list[str]:
-    if not bounds:
-        return []
-
     is_percentage = threshold_type == InsightThresholdType.PERCENTAGE
-
     formatted_value = f"{calculated_value:.2%}" if is_percentage else calculated_value
 
     match condition_type:
@@ -344,12 +415,13 @@
 
     if bounds.lower is not None and calculated_value < bounds.lower:
         lower_value = f"{bounds.lower:.2%}" if is_percentage else bounds.lower
         return [
-            f"The insight value ({series}) for previous {interval_type or 'interval'} ({formatted_value}) {condition_text} less than lower threshold ({lower_value})"
+            f"The insight value ({series}) for {'current' if is_current_interval else 'previous'} {interval_type or 'interval'} ({formatted_value}) {condition_text} less than lower threshold ({lower_value})"
         ]
+
     if bounds.upper is not None and calculated_value > bounds.upper:
         upper_value = f"{bounds.upper:.2%}" if is_percentage else bounds.upper
         return [
-            f"The insight value ({series}) for previous {interval_type or 'interval'} ({formatted_value}) {condition_text} more than upper threshold ({upper_value})"
+            f"The insight value ({series}) for {'current' if is_current_interval else 'previous'} {interval_type or 'interval'} ({formatted_value}) {condition_text} more than upper threshold ({upper_value})"
         ]
 
     return []
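The only user-visible change in these message templates is the current/previous switch. Plugging the weekly percentage test's values into the template reproduces the asserted message exactly (condition_text is hardcoded to "increased" here for brevity):

    is_current_interval, series, formatted_value, upper_value = True, "signed_up", "200.00%", "20.00%"
    message = (
        f"The insight value ({series}) for {'current' if is_current_interval else 'previous'} week "
        f"({formatted_value}) increased more than upper threshold ({upper_value})"
    )
    assert message == "The insight value (signed_up) for current week (200.00%) increased more than upper threshold (20.00%)"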
diff --git a/posthog/tasks/alerts/utils.py b/posthog/tasks/alerts/utils.py
index 686ec8a1355fee..28bebd0aa43e32 100644
--- a/posthog/tasks/alerts/utils.py
+++ b/posthog/tasks/alerts/utils.py
@@ -1,6 +1,8 @@
-from dateutil.relativedelta import relativedelta
-
+from dateutil.relativedelta import relativedelta, MO
 from django.utils import timezone
+import pytz
+
+from datetime import datetime
 import structlog
 
 from posthog.email import EmailMessage
@@ -56,6 +58,51 @@ def alert_calculation_interval_to_relativedelta(alert_calculation_interval: Aler
     raise ValueError(f"Invalid alert calculation interval: {alert_calculation_interval}")
 
 
+def next_check_time(alert: AlertConfiguration) -> datetime:
+    """
+    Rule by calculation interval:
+
+    hourly alerts -> we want them to run at the same minute every hour (the minute comes from the alert's creation time, so that alerts are spread out and don't all run at the start of the hour)
+    daily alerts -> we want them to run at the start of the day (around 1 AM) in the team's timezone
+    weekly alerts -> we want them to run at the start of the week (Monday, around 3 AM) in the team's timezone
+    monthly alerts -> we want them to run at the start of the month (first day of the month, around 4 AM) in the team's timezone
+    """
+    now = datetime.now(pytz.UTC)
+    team_timezone = pytz.timezone(alert.team.timezone)
+
+    match alert.calculation_interval:
+        case AlertCalculationInterval.HOURLY:
+            return (alert.next_check_at or now) + relativedelta(hours=1)
+        case AlertCalculationInterval.DAILY:
+            # Get the next date in the specified timezone
+            tomorrow_local = datetime.now(team_timezone) + relativedelta(days=1)
+
+            # set hour to 1 AM
+            # only replacing hour and not minute/second... to distribute execution of all daily alerts
+            one_am_local = tomorrow_local.replace(hour=1)
+
+            # Convert to UTC
+            return one_am_local.astimezone(pytz.utc)
+        case AlertCalculationInterval.WEEKLY:
+            next_monday_local = datetime.now(team_timezone) + relativedelta(days=1, weekday=MO(1))
+
+            # Set the hour to around 3 AM on next Monday
+            next_monday_3am_local = next_monday_local.replace(hour=3)
+
+            # Convert to UTC
+            return next_monday_3am_local.astimezone(pytz.utc)
+        case AlertCalculationInterval.MONTHLY:
+            next_month_local = datetime.now(team_timezone) + relativedelta(months=1)
+
+            # Set hour to 4 AM on first day of next month
+            next_month_4am_local = next_month_local.replace(day=1, hour=4)
+
+            # Convert to UTC
+            return next_month_4am_local.astimezone(pytz.utc)
+        case _:
+            raise ValueError(f"Invalid alert calculation interval: {alert.calculation_interval}")
+
+
 def send_notifications_for_breaches(alert: AlertConfiguration, breaches: list[str]) -> None:
     subject = f"PostHog alert {alert.name} is firing"
     campaign_key = f"alert-firing-notification-{alert.id}-{timezone.now().timestamp()}"
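A worked example of the scheduling rules above for a daily alert, with the team in US/Pacific (a sketch using only the stdlib plus pytz/dateutil; the AlertConfiguration plumbing is omitted):

    from datetime import datetime

    import pytz
    from dateutil.relativedelta import relativedelta

    team_timezone = pytz.timezone("US/Pacific")

    # Suppose "now" in the team's timezone is 2024-06-04 22:30 local.
    now_local = team_timezone.localize(datetime(2024, 6, 4, 22, 30))

    tomorrow_local = now_local + relativedelta(days=1)   # 2024-06-05 22:30 local
    one_am_local = tomorrow_local.replace(hour=1)        # 2024-06-05 01:30 local (minute kept, to spread load)
    next_check_utc = one_am_local.astimezone(pytz.utc)   # 2024-06-05 08:30 UTC (PDT is UTC-7)

    assert next_check_utc.hour == 8 and next_check_utc.minute == 30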
diff --git a/posthog/tasks/periodic_digest.py b/posthog/tasks/periodic_digest.py
new file mode 100644
index 00000000000000..c0b6995b1e91f4
--- /dev/null
+++ b/posthog/tasks/periodic_digest.py
@@ -0,0 +1,271 @@
+import dataclasses
+from datetime import datetime, timedelta
+from typing import Any, Optional
+from zoneinfo import ZoneInfo
+
+import structlog
+from celery import shared_task
+from dateutil import parser
+from django.db.models import QuerySet
+from django.utils import timezone
+from sentry_sdk import capture_exception
+
+from posthog.models.dashboard import Dashboard
+from posthog.models.event_definition import EventDefinition
+from posthog.models.experiment import Experiment
+from posthog.models.feature_flag import FeatureFlag
+from posthog.models.feedback.survey import Survey
+from posthog.models.messaging import MessagingRecord
+from posthog.models.team.team import Team
+from posthog.session_recordings.models.session_recording_playlist import (
+    SessionRecordingPlaylist,
+)
+from posthog.tasks.usage_report import (
+    USAGE_REPORT_TASK_KWARGS,
+    capture_report,
+    get_instance_metadata,
+)
+from posthog.tasks.utils import CeleryQueue
+from posthog.warehouse.models.external_data_source import ExternalDataSource
+
+logger = structlog.get_logger(__name__)
+
+
+@dataclasses.dataclass
+class periodicDigestReport:
+    new_dashboards: list[dict[str, str]]
+    new_event_definitions: list[dict[str, str]]
+    new_playlists: list[dict[str, str]]
+    new_experiments_launched: list[dict[str, str]]
+    new_experiments_completed: list[dict[str, str]]
+    new_external_data_sources: list[dict[str, str]]
+    new_surveys_launched: list[dict[str, str]]
+    new_feature_flags: list[dict[str, str]]
+
+
+def get_teams_for_digest() -> list[Team]:
+    from django.db.models import Q
+
+    return list(
+        Team.objects.select_related("organization")
+        .exclude(Q(organization__for_internal_metrics=True) | Q(is_demo=True))
+        .only("id", "name", "organization__id", "organization__name", "organization__created_at")
+    )
+
+
+def get_teams_with_new_dashboards(end: datetime, begin: datetime) -> QuerySet:
+    return Dashboard.objects.filter(created_at__gt=begin, created_at__lte=end).values("team_id", "name", "id")
+
+
+def get_teams_with_new_event_definitions(end: datetime, begin: datetime) -> QuerySet:
+    return EventDefinition.objects.filter(created_at__gt=begin, created_at__lte=end).values("team_id", "name", "id")
+
+
+def get_teams_with_new_playlists(end:
datetime, begin: datetime) -> QuerySet: + return SessionRecordingPlaylist.objects.filter(created_at__gt=begin, created_at__lte=end).values( + "team_id", "name", "short_id" + ) + + +def get_teams_with_new_experiments_launched(end: datetime, begin: datetime) -> QuerySet: + return Experiment.objects.filter(start_date__gt=begin, start_date__lte=end).values( + "team_id", "name", "id", "start_date" + ) + + +def get_teams_with_new_experiments_completed(end: datetime, begin: datetime) -> QuerySet: + return Experiment.objects.filter(end_date__gt=begin, end_date__lte=end).values( + "team_id", "name", "id", "start_date", "end_date" + ) + + +def get_teams_with_new_external_data_sources(end: datetime, begin: datetime) -> QuerySet: + return ExternalDataSource.objects.filter(created_at__gt=begin, created_at__lte=end, deleted=False).values( + "team_id", "source_type", "id" + ) + + +def get_teams_with_new_surveys_launched(end: datetime, begin: datetime) -> QuerySet: + return Survey.objects.filter(start_date__gt=begin, start_date__lte=end).values( + "team_id", "name", "id", "description", "start_date" + ) + + +def get_teams_with_new_feature_flags(end: datetime, begin: datetime) -> QuerySet: + return ( + FeatureFlag.objects.filter( + created_at__gt=begin, + created_at__lte=end, + deleted=False, + ) + .exclude(name__contains="Feature Flag for Experiment") + .exclude(name__contains="Targeting flag for survey") + .values("team_id", "name", "id", "key") + ) + + +def convert_team_digest_items_to_dict(items: QuerySet) -> dict[int, QuerySet]: + return {team_id: items.filter(team_id=team_id) for team_id in items.values_list("team_id", flat=True).distinct()} + + +def count_non_zero_digest_items(report: periodicDigestReport) -> int: + return sum(1 for key in report.__dataclass_fields__ if len(getattr(report, key)) > 0) + + +def _get_all_digest_data_as_team_rows(period_start: datetime, period_end: datetime) -> dict[str, Any]: + all_digest_data = _get_all_digest_data(period_start, period_end) + # convert it to a map of team_id -> value + for key, rows in all_digest_data.items(): + all_digest_data[key] = convert_team_digest_items_to_dict(rows) + return all_digest_data + + +def _get_all_digest_data(period_start: datetime, period_end: datetime) -> dict[str, Any]: + return { + "teams_with_new_dashboards": get_teams_with_new_dashboards(period_end, period_start), + "teams_with_new_event_definitions": get_teams_with_new_event_definitions(period_end, period_start), + "teams_with_new_playlists": get_teams_with_new_playlists(period_end, period_start), + "teams_with_new_experiments_launched": get_teams_with_new_experiments_launched(period_end, period_start), + "teams_with_new_experiments_completed": get_teams_with_new_experiments_completed(period_end, period_start), + "teams_with_new_external_data_sources": get_teams_with_new_external_data_sources(period_end, period_start), + "teams_with_new_surveys_launched": get_teams_with_new_surveys_launched(period_end, period_start), + "teams_with_new_feature_flags": get_teams_with_new_feature_flags(period_end, period_start), + } + + +def get_periodic_digest_report(all_digest_data: dict[str, Any], team: Team) -> periodicDigestReport: + return periodicDigestReport( + new_dashboards=[ + {"name": dashboard.get("name"), "id": dashboard.get("id")} + for dashboard in all_digest_data["teams_with_new_dashboards"].get(team.id, []) + ], + new_event_definitions=[ + {"name": event_definition.get("name"), "id": event_definition.get("id")} + for event_definition in 
all_digest_data["teams_with_new_event_definitions"].get(team.id, []) + ], + new_playlists=[ + {"name": playlist.get("name"), "id": playlist.get("short_id")} + for playlist in all_digest_data["teams_with_new_playlists"].get(team.id, []) + ], + new_experiments_launched=[ + { + "name": experiment.get("name"), + "id": experiment.get("id"), + "start_date": experiment.get("start_date").isoformat(), + } + for experiment in all_digest_data["teams_with_new_experiments_launched"].get(team.id, []) + ], + new_experiments_completed=[ + { + "name": experiment.get("name"), + "id": experiment.get("id"), + "start_date": experiment.get("start_date").isoformat(), + "end_date": experiment.get("end_date").isoformat(), + } + for experiment in all_digest_data["teams_with_new_experiments_completed"].get(team.id, []) + ], + new_external_data_sources=[ + {"source_type": source.get("source_type"), "id": source.get("id")} + for source in all_digest_data["teams_with_new_external_data_sources"].get(team.id, []) + ], + new_surveys_launched=[ + { + "name": survey.get("name"), + "id": survey.get("id"), + "start_date": survey.get("start_date").isoformat(), + "description": survey.get("description"), + } + for survey in all_digest_data["teams_with_new_surveys_launched"].get(team.id, []) + ], + new_feature_flags=[ + {"name": feature_flag.get("name"), "id": feature_flag.get("id"), "key": feature_flag.get("key")} + for feature_flag in all_digest_data["teams_with_new_feature_flags"].get(team.id, []) + ], + ) + + +@shared_task(queue=CeleryQueue.USAGE_REPORTS.value, ignore_result=True, max_retries=3) +def send_periodic_digest_report( + *, + team_id: int, + team_name: str, + periodic_digest_report: dict[str, Any], + instance_metadata: dict[str, Any], + period_end: datetime, + period_start: datetime, + digest_items_with_data: int, +) -> None: + period_str = period_end.strftime("%Y-%m-%d") + days = (period_end - period_start).days + campaign_key = f"periodic_digest_{period_str}_{days}d" + + # Use a consistent identifier for the team + team_identifier = f"team_{team_id}" + + # Check if we've already sent this digest using get_or_create + record, created = MessagingRecord.objects.get_or_create(raw_email=team_identifier, campaign_key=campaign_key) + + if not created and record.sent_at: + logger.info(f"Skipping duplicate periodic digest for team {team_id} for period ending {period_str}") + return + + full_report_dict = { + "team_id": team_id, + "team_name": team_name, + "template": "periodic_digest_report", + "digest_items_with_data": digest_items_with_data, + **periodic_digest_report, + **instance_metadata, + } + + capture_report.delay( + capture_event_name="transactional email", + team_id=team_id, + full_report_dict=full_report_dict, + send_for_all_members=True, + ) + + # Mark as sent + record.sent_at = timezone.now() + record.save() + + +@shared_task(**USAGE_REPORT_TASK_KWARGS, max_retries=0) +def send_all_periodic_digest_reports( + dry_run: bool = False, + end_date: Optional[str] = None, + begin_date: Optional[str] = None, +) -> None: + period_end = ( + parser.parse(end_date) + if end_date + else datetime.now(tz=ZoneInfo("UTC")).replace(hour=0, minute=0, second=0, microsecond=0) + ) + period_start = parser.parse(begin_date) if begin_date else period_end - timedelta(days=7) + + try: + all_digest_data = _get_all_digest_data_as_team_rows(period_start, period_end) + teams = get_teams_for_digest() + time_now = datetime.now() + for team in teams: + report = get_periodic_digest_report(all_digest_data, team) + full_report_dict = 
dataclasses.asdict(report)
+            instance_metadata = dataclasses.asdict(get_instance_metadata((period_start, period_end)))
+            digest_items_with_data = count_non_zero_digest_items(report)
+
+            # Then capture as events to PostHog, so they can be sent via email
+            if digest_items_with_data > 0 and not dry_run:
+                send_periodic_digest_report.delay(
+                    team_id=team.id,
+                    team_name=team.name,
+                    periodic_digest_report=full_report_dict,
+                    instance_metadata=instance_metadata,
+                    period_end=period_end,
+                    period_start=period_start,
+                    digest_items_with_data=digest_items_with_data,
+                )
+        time_since = datetime.now() - time_now
+        logger.debug(f"Sending periodic digest reports took {time_since.total_seconds()} seconds.")
+    except Exception as err:
+        capture_exception(err)
+        raise
diff --git a/posthog/tasks/scheduled.py b/posthog/tasks/scheduled.py
index a53077a9754a13..8912d1e362be39 100644
--- a/posthog/tasks/scheduled.py
+++ b/posthog/tasks/scheduled.py
@@ -9,12 +9,13 @@
 from posthog.caching.warming import schedule_warming_for_teams_task
 from posthog.celery import app
 from posthog.tasks.alerts.checks import (
+    alerts_backlog_task,
     check_alerts_task,
     checks_cleanup_task,
-    alerts_backlog_task,
     reset_stuck_alerts_task,
 )
 from posthog.tasks.integrations import refresh_integrations
+from posthog.tasks.periodic_digest import send_all_periodic_digest_reports
 from posthog.tasks.tasks import (
     calculate_cohort,
     calculate_decide_usage,
@@ -51,9 +52,9 @@
     sync_all_organization_available_product_features,
     update_event_partitions,
     update_quota_limiting,
+    update_survey_adaptive_sampling,
     update_survey_iteration,
     verify_persons_data_in_sync,
-    update_survey_adaptive_sampling,
 )
 from posthog.utils import get_crontab
@@ -131,6 +132,13 @@ def setup_periodic_tasks(sender: Celery, **kwargs: Any) -> None:
         name="update quota limiting",
     )
 
+    # Send all periodic digest reports
+    sender.add_periodic_task(
+        crontab(hour="9", minute="0", day_of_week="mon"),
+        send_all_periodic_digest_reports.s(),
+        name="send all weekly digest reports",
+    )
+
     # PostHog Cloud cron jobs
     # NOTE: We can't use is_cloud here as some Django elements aren't loaded yet.
We check in the task execution instead # Verify that persons data is in sync every day at 4 AM UTC diff --git a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr index 03fbd544879172..563d97146fefe2 100644 --- a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr +++ b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr @@ -415,9 +415,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND 
"posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -980,9 +1045,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1214,6 +1344,64 @@ AND "posthog_featureflag"."team_id" = 99999) ''' # --- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.40 + ''' + SELECT "posthog_scheduledchange"."id", + "posthog_scheduledchange"."record_id", + 
"posthog_scheduledchange"."model_name", + "posthog_scheduledchange"."payload", + "posthog_scheduledchange"."scheduled_at", + "posthog_scheduledchange"."executed_at", + "posthog_scheduledchange"."failure_reason", + "posthog_scheduledchange"."team_id", + "posthog_scheduledchange"."created_at", + "posthog_scheduledchange"."created_by_id", + "posthog_scheduledchange"."updated_at" + FROM "posthog_scheduledchange" + WHERE "posthog_scheduledchange"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.41 + ''' + SELECT "posthog_scheduledchange"."id", + "posthog_scheduledchange"."record_id", + "posthog_scheduledchange"."model_name", + "posthog_scheduledchange"."payload", + "posthog_scheduledchange"."scheduled_at", + "posthog_scheduledchange"."executed_at", + "posthog_scheduledchange"."failure_reason", + "posthog_scheduledchange"."team_id", + "posthog_scheduledchange"."created_at", + "posthog_scheduledchange"."created_by_id", + "posthog_scheduledchange"."updated_at" + FROM "posthog_scheduledchange" + WHERE "posthog_scheduledchange"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.42 + ''' + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE "posthog_featureflag"."key" = 'flag-1' + LIMIT 21 + ''' +# --- # name: TestProcessScheduledChanges.test_schedule_feature_flag_multiple_changes.5 ''' SELECT "posthog_survey"."id", @@ -1334,9 +1522,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + 
"posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', diff --git a/posthog/tasks/test/test_periodic_digest.py b/posthog/tasks/test/test_periodic_digest.py new file mode 100644 index 00000000000000..4d495eac6aa8d9 --- /dev/null +++ b/posthog/tasks/test/test_periodic_digest.py @@ -0,0 +1,353 @@ +from datetime import datetime, timedelta +from unittest.mock import ANY, MagicMock, patch +from uuid import uuid4 + +from django.utils.timezone import now +from freezegun import freeze_time + +from posthog.models import Dashboard, EventDefinition, Experiment, FeatureFlag, Survey +from posthog.models.messaging import MessagingRecord +from posthog.session_recordings.models.session_recording_playlist import ( + SessionRecordingPlaylist, +) +from posthog.tasks.periodic_digest import send_all_periodic_digest_reports +from posthog.test.base import APIBaseTest +from posthog.warehouse.models import ExternalDataSource + + +@freeze_time("2024-01-01T00:01:00Z") # A Monday +class TestPeriodicDigestReport(APIBaseTest): + def setUp(self) -> None: + super().setUp() + self.distinct_id = str(uuid4()) + + @freeze_time("2024-01-20T00:01:00Z") + @patch("posthog.tasks.periodic_digest.capture_report") + def test_periodic_digest_report(self, mock_capture: MagicMock) -> None: + # Create test data from "last week" + with freeze_time("2024-01-15T00:01:00Z"): + # Create a dashboard + dashboard = Dashboard.objects.create( + team=self.team, + name="Test Dashboard", + ) + + # Create an event definition + event_definition = EventDefinition.objects.create( + team=self.team, + name="Test Event", + ) + + # Create a playlist + playlist = SessionRecordingPlaylist.objects.create( + team=self.team, + name="Test Playlist", + ) + + # Create experiments + # this flag should not be included in the digest + flag_for_launched_experiment = FeatureFlag.objects.create( + team=self.team, + name="Feature 
Flag for Experiment My experiment 1", + key="flag-for-launched-experiment", + ) + launched_experiment = Experiment.objects.create( + team=self.team, + name="Launched Experiment", + start_date=now(), + feature_flag=flag_for_launched_experiment, + ) + + # Create external data source + external_data_source = ExternalDataSource.objects.create( + team=self.team, + source_id="test_source", + connection_id="test_connection", + status="completed", + source_type="Stripe", + ) + + # Create a survey + # this flag should not be included in the digest since it's generated for the survey + flag_for_survey = FeatureFlag.objects.create( + team=self.team, + name="Targeting flag for survey My survey", + key="feature-flag-for-survey", + ) + survey = Survey.objects.create( + team=self.team, + name="Test Survey", + description="Test Description", + start_date=now(), + targeting_flag=flag_for_survey, + ) + + # Create a feature flag + feature_flag = FeatureFlag.objects.create( + team=self.team, + name="Test Flag", + key="test-flag", + ) + + with freeze_time("2024-01-10T00:01:00Z"): + # this flag should not be included in the digest + flag_for_completed_experiment = FeatureFlag.objects.create( + team=self.team, + name="Feature Flag for Experiment My experiment 2", + key="feature-flag-for-completed-experiment", + ) + # completed experiment is not included in the list of launched experiments + # but is included in the list of completed experiments + completed_experiment = Experiment.objects.create( + team=self.team, + name="Completed Experiment", + start_date=now() + timedelta(days=1), + end_date=now() + timedelta(days=6), + feature_flag=flag_for_completed_experiment, + ) + + # Run the periodic digest report task + send_all_periodic_digest_reports() + + # Check that the capture event was called with the correct data + expected_properties = { + "team_id": self.team.id, + "team_name": self.team.name, + "template": "periodic_digest_report", + "users_who_logged_in": [], + "users_who_logged_in_count": 0, + "users_who_signed_up": [], + "users_who_signed_up_count": 0, + "period": { + "end_inclusive": "2024-01-20T00:00:00+00:00", + "start_inclusive": "2024-01-13T00:00:00+00:00", + }, + "plugins_enabled": {}, + "plugins_installed": {}, + "product": "open source", + "realm": "hosted-clickhouse", + "site_url": "http://localhost:8000", + "table_sizes": ANY, + "clickhouse_version": ANY, + "deployment_infrastructure": "unknown", + "helm": {}, + "instance_tag": "none", + "new_dashboards": [ + { + "name": "Test Dashboard", + "id": dashboard.id, + } + ], + "new_event_definitions": [ + { + "name": "Test Event", + "id": event_definition.id, + } + ], + "new_playlists": [ + { + "name": "Test Playlist", + "id": playlist.short_id, + } + ], + "new_experiments_launched": [ + { + "name": "Launched Experiment", + "id": launched_experiment.id, + "start_date": launched_experiment.start_date.isoformat(), # type: ignore + } + ], + "new_experiments_completed": [ + { + "name": "Completed Experiment", + "id": completed_experiment.id, + "start_date": completed_experiment.start_date.isoformat(), # type: ignore + "end_date": completed_experiment.end_date.isoformat(), # type: ignore + } + ], + "new_external_data_sources": [ + { + "source_type": "Stripe", + "id": external_data_source.id, + } + ], + "new_surveys_launched": [ + { + "name": "Test Survey", + "id": survey.id, + "start_date": survey.start_date.isoformat(), # type: ignore + "description": "Test Description", + } + ], + "new_feature_flags": [ + { + "name": "Test Flag", + "id": feature_flag.id, 
+ "key": "test-flag", + } + ], + "digest_items_with_data": 8, + } + + mock_capture.delay.assert_called_once_with( + capture_event_name="transactional email", + team_id=self.team.id, + full_report_dict=expected_properties, + send_for_all_members=True, + ) + + @patch("posthog.tasks.periodic_digest.capture_report") + def test_periodic_digest_report_dry_run(self, mock_capture: MagicMock) -> None: + send_all_periodic_digest_reports(dry_run=True) + mock_capture.delay.assert_not_called() + + @patch("posthog.tasks.periodic_digest.capture_report") + def test_periodic_digest_report_custom_dates(self, mock_capture: MagicMock) -> None: + # Create test data + with freeze_time("2024-01-15T00:01:00Z"): + dashboard = Dashboard.objects.create( + team=self.team, + name="Test Dashboard", + ) + with freeze_time("2024-01-13T00:01:00Z"): + # outside the range, should be excluded + Dashboard.objects.create( + team=self.team, + name="Test Dashboard", + ) + + with freeze_time("2024-01-16T00:01:00Z"): + end_date = datetime.now() + begin_date = end_date - timedelta(days=2) + + # Run the periodic digest report task with custom dates + send_all_periodic_digest_reports(begin_date=begin_date.isoformat(), end_date=end_date.isoformat()) + + # Check that the capture event was called with the correct data + expected_properties = { + "team_id": self.team.id, + "team_name": self.team.name, + "template": "periodic_digest_report", + "users_who_logged_in": [], + "users_who_logged_in_count": 0, + "users_who_signed_up": [], + "users_who_signed_up_count": 0, + "period": { + "end_inclusive": "2024-01-16T00:01:00", + "start_inclusive": "2024-01-14T00:01:00", + }, + "plugins_enabled": {}, + "plugins_installed": {}, + "product": "open source", + "realm": "hosted-clickhouse", + "site_url": "http://localhost:8000", + "table_sizes": ANY, + "clickhouse_version": ANY, + "deployment_infrastructure": "unknown", + "helm": {}, + "instance_tag": "none", + "new_dashboards": [ + { + "name": "Test Dashboard", + "id": dashboard.id, + } + ], + "new_event_definitions": [], + "new_playlists": [], + "new_experiments_launched": [], + "new_experiments_completed": [], + "new_external_data_sources": [], + "new_surveys_launched": [], + "new_feature_flags": [], + "digest_items_with_data": 1, + } + + mock_capture.delay.assert_called_once_with( + capture_event_name="transactional email", + team_id=self.team.id, + full_report_dict=expected_properties, + send_for_all_members=True, + ) + + @freeze_time("2024-01-20T00:01:00Z") + @patch("posthog.tasks.periodic_digest.capture_report") + def test_periodic_digest_report_idempotency(self, mock_capture: MagicMock) -> None: + # Create test data + with freeze_time("2024-01-15T00:01:00Z"): + Dashboard.objects.create( + team=self.team, + name="Test Dashboard", + ) + + # First run - should send the digest + send_all_periodic_digest_reports() + + # Verify first call + mock_capture.delay.assert_called_once() + mock_capture.delay.reset_mock() + + # Check that messaging record was created + record = MessagingRecord.objects.get( # type: ignore + raw_email=f"team_{self.team.id}", campaign_key="periodic_digest_2024-01-20_7d" + ) + self.assertIsNotNone(record.sent_at) + + # Second run - should not send the digest again + send_all_periodic_digest_reports() + mock_capture.delay.assert_not_called() + + # Verify only one record exists + self.assertEqual(MessagingRecord.objects.count(), 1) + + @freeze_time("2024-01-20T00:01:00Z") + @patch("posthog.tasks.periodic_digest.capture_report") + def test_periodic_digest_different_periods(self, 
mock_capture: MagicMock) -> None: + # Create test data + with freeze_time("2024-01-15T00:01:00Z"): + Dashboard.objects.create( + team=self.team, + name="Test Dashboard", + ) + + # Send weekly digest + send_all_periodic_digest_reports() + mock_capture.delay.assert_called_once() + mock_capture.delay.reset_mock() + + # Send monthly digest (different period length) + send_all_periodic_digest_reports( + begin_date=(datetime.now() - timedelta(days=30)).isoformat(), end_date=datetime.now().isoformat() + ) + mock_capture.delay.assert_called_once() + + # Verify two different records exist + records = MessagingRecord.objects.filter(raw_email=f"team_{self.team.id}") # type: ignore + self.assertEqual(records.count(), 2) + campaign_keys = sorted([r.campaign_key for r in records]) + self.assertEqual(campaign_keys, ["periodic_digest_2024-01-20_30d", "periodic_digest_2024-01-20_7d"]) + + @freeze_time("2024-01-20T00:01:00Z") + @patch("posthog.tasks.periodic_digest.capture_report") + def test_periodic_digest_empty_report_no_record(self, mock_capture: MagicMock) -> None: + # Run without any data (empty digest) + send_all_periodic_digest_reports() + + # Verify no capture call and no messaging record + mock_capture.delay.assert_not_called() + self.assertEqual(MessagingRecord.objects.count(), 0) + + @freeze_time("2024-01-20T00:01:00Z") + @patch("posthog.tasks.periodic_digest.capture_report") + def test_periodic_digest_dry_run_no_record(self, mock_capture: MagicMock) -> None: + # Create test data + Dashboard.objects.create( + team=self.team, + name="Test Dashboard", + ) + + # Run in dry_run mode + send_all_periodic_digest_reports(dry_run=True) + + # Verify no capture call and no messaging record + mock_capture.delay.assert_not_called() + self.assertEqual(MessagingRecord.objects.count(), 0) diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py index ca5a46af1f914f..a0c14a08a82d05 100644 --- a/posthog/tasks/test/test_usage_report.py +++ b/posthog/tasks/test/test_usage_report.py @@ -50,6 +50,7 @@ APIBaseTest, ClickhouseDestroyTablesMixin, ClickhouseTestMixin, + QueryMatchingTest, _create_event, _create_person, also_test_with_materialized_columns, @@ -140,7 +141,7 @@ def _setup_replay_data(team_id: int, include_mobile_replay: bool) -> None: @freeze_time("2022-01-10T00:01:00Z") -class UsageReport(APIBaseTest, ClickhouseTestMixin, ClickhouseDestroyTablesMixin): +class UsageReport(APIBaseTest, ClickhouseTestMixin, ClickhouseDestroyTablesMixin, QueryMatchingTest): def setUp(self) -> None: super().setUp() @@ -1600,11 +1601,11 @@ def test_capture_event_called_with_string_timestamp(self, mock_client: MagicMock mock_posthog = MagicMock() mock_client.return_value = mock_posthog capture_event( - mock_client, - "test event", - organization.id, - {"prop1": "val1"}, - "2021-10-10T23:01:00.00Z", + pha_client=mock_client, + name="test event", + organization_id=organization.id, + properties={"prop1": "val1"}, + timestamp="2021-10-10T23:01:00.00Z", ) assert mock_client.capture.call_args[1]["timestamp"] == datetime(2021, 10, 10, 23, 1, tzinfo=tzutc()) diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py index 9bd8619317fa6b..41ca2d8c86f633 100644 --- a/posthog/tasks/usage_report.py +++ b/posthog/tasks/usage_report.py @@ -336,20 +336,23 @@ def send_report_to_billing_service(org_id: str, report: dict[str, Any]) -> None: capture_exception(err) pha_client = Client("sTMFPsFhdP1Ssg") capture_event( - pha_client, - f"organization usage report to billing service failure", - 
org_id, - {"err": str(err)}, + pha_client=pha_client, + name=f"organization usage report to billing service failure", + organization_id=org_id, + properties={"err": str(err)}, ) raise def capture_event( + *, pha_client: Client, name: str, - organization_id: str, + organization_id: Optional[str] = None, + team_id: Optional[int] = None, properties: dict[str, Any], timestamp: Optional[Union[datetime, str]] = None, + send_for_all_members: bool = False, ) -> None: if timestamp and isinstance(timestamp, str): try: @@ -357,16 +360,38 @@ def capture_event( except ValueError: timestamp = None + if not organization_id and not team_id: + raise ValueError("Either organization_id or team_id must be provided") + if is_cloud(): - org_owner = get_org_owner_or_first_user(organization_id) - distinct_id = org_owner.distinct_id if org_owner and org_owner.distinct_id else f"org-{organization_id}" - pha_client.capture( - distinct_id, - name, - {**properties, "scope": "user"}, - groups={"organization": organization_id, "instance": settings.SITE_URL}, - timestamp=timestamp, - ) + distinct_ids = [] + if send_for_all_members: + if organization_id: + distinct_ids = list( + OrganizationMembership.objects.filter(organization_id=organization_id).values_list( + "user__distinct_id", flat=True + ) + ) + elif team_id: + team = Team.objects.get(id=team_id) + distinct_ids = [user.distinct_id for user in team.all_users_with_access()] + else: + if not organization_id: + team = Team.objects.get(id=team_id) + organization_id = team.organization_id + org_owner = get_org_owner_or_first_user(organization_id) if organization_id else None + distinct_ids.append( + org_owner.distinct_id if org_owner and org_owner.distinct_id else f"org-{organization_id}" + ) + + for distinct_id in distinct_ids: + pha_client.capture( + distinct_id, + name, + {**properties, "scope": "user"}, + groups={"organization": organization_id, "instance": settings.SITE_URL}, + timestamp=timestamp, + ) pha_client.group_identify("organization", organization_id, properties) else: pha_client.capture( @@ -710,20 +735,40 @@ def get_teams_with_hog_function_fetch_calls_in_period( @shared_task(**USAGE_REPORT_TASK_KWARGS, max_retries=0) def capture_report( + *, capture_event_name: str, - org_id: str, + org_id: Optional[str] = None, + team_id: Optional[int] = None, full_report_dict: dict[str, Any], at_date: Optional[datetime] = None, + send_for_all_members: bool = False, ) -> None: + if not org_id and not team_id: + raise ValueError("Either org_id or team_id must be provided") pha_client = Client("sTMFPsFhdP1Ssg") try: - capture_event(pha_client, capture_event_name, org_id, full_report_dict, timestamp=at_date) + capture_event( + pha_client=pha_client, + name=capture_event_name, + organization_id=org_id, + team_id=team_id, + properties=full_report_dict, + timestamp=at_date, + send_for_all_members=send_for_all_members, + ) logger.info(f"UsageReport sent to PostHog for organization {org_id}") except Exception as err: logger.exception( f"UsageReport sent to PostHog for organization {org_id} failed: {str(err)}", ) - capture_event(pha_client, f"{capture_event_name} failure", org_id, {"error": str(err)}) + capture_event( + pha_client=pha_client, + name=f"{capture_event_name} failure", + organization_id=org_id, + team_id=team_id, + properties={"error": str(err)}, + send_for_all_members=send_for_all_members, + ) pha_client.flush() @@ -936,7 +981,7 @@ def _get_teams_for_usage_reports() -> Sequence[Team]: return list( Team.objects.select_related("organization") 
.exclude(Q(organization__for_internal_metrics=True) | Q(is_demo=True)) - .only("id", "organization__id", "organization__name", "organization__created_at") + .only("id", "name", "organization__id", "organization__name", "organization__created_at") ) @@ -1041,6 +1086,7 @@ def _get_all_org_reports(period_start: datetime, period_end: datetime) -> dict[s logger.info("Getting all usage data...") # noqa T201 time_now = datetime.now() all_data = _get_all_usage_data_as_team_rows(period_start, period_end) + logger.debug(f"Getting all usage data took {(datetime.now() - time_now).total_seconds()} seconds.") # noqa T201 logger.info("Getting teams for usage reports...") # noqa T201 @@ -1108,7 +1154,12 @@ def send_all_org_usage_reports( # First capture the events to PostHog if not skip_capture_event: at_date_str = at_date.isoformat() if at_date else None - capture_report.delay(capture_event_name, org_id, full_report_dict, at_date_str) + capture_report.delay( + capture_event_name=capture_event_name, + org_id=org_id, + full_report_dict=full_report_dict, + at_date=at_date_str, + ) # Then capture the events to Billing if has_non_zero_usage(full_report): diff --git a/posthog/temporal/batch_exports/__init__.py b/posthog/temporal/batch_exports/__init__.py index 33c1b200e6a976..a3616f1107c5b4 100644 --- a/posthog/temporal/batch_exports/__init__.py +++ b/posthog/temporal/batch_exports/__init__.py @@ -17,6 +17,12 @@ HttpBatchExportWorkflow, insert_into_http_activity, ) +from posthog.temporal.batch_exports.monitoring import ( + BatchExportMonitoringWorkflow, + get_batch_export, + get_event_counts, + update_batch_export_runs, +) from posthog.temporal.batch_exports.noop import NoOpWorkflow, noop_activity from posthog.temporal.batch_exports.postgres_batch_export import ( PostgresBatchExportWorkflow, @@ -54,6 +60,7 @@ SnowflakeBatchExportWorkflow, HttpBatchExportWorkflow, SquashPersonOverridesWorkflow, + BatchExportMonitoringWorkflow, ] ACTIVITIES = [ @@ -76,4 +83,7 @@ update_batch_export_backfill_model_status, wait_for_mutation, wait_for_table, + get_batch_export, + get_event_counts, + update_batch_export_runs, ] diff --git a/posthog/temporal/batch_exports/bigquery_batch_export.py b/posthog/temporal/batch_exports/bigquery_batch_export.py index f752b77bf72ce0..30c600d210802d 100644 --- a/posthog/temporal/batch_exports/bigquery_batch_export.py +++ b/posthog/temporal/batch_exports/bigquery_batch_export.py @@ -69,9 +69,19 @@ # Raised when table_id isn't valid. Sadly, `ValueError` is rather generic, but we # don't anticipate a `ValueError` thrown from our own export code. "ValueError", + # Raised when attempting to run a batch export without required BigQuery permissions. + # Our own version of `Forbidden`. 
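+    # (Entries in this list are matched against exception class names when
+    # deciding whether a failed run can be retried, so the string below must
+    # stay in sync with the exception class defined later in this module.)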
+ "MissingRequiredPermissionsError", ] +class MissingRequiredPermissionsError(Exception): + """Raised when missing required permissions in BigQuery.""" + + def __init__(self): + super().__init__("Missing required permissions to run this batch export") + + def get_bigquery_fields_from_record_schema( record_schema: pa.Schema, known_json_columns: collections.abc.Sequence[str] ) -> list[bigquery.SchemaField]: @@ -237,7 +247,12 @@ async def managed_table( yield table finally: if delete is True: - await self.adelete_table(project_id, dataset_id, table_id, not_found_ok) + try: + await self.adelete_table(project_id, dataset_id, table_id, not_found_ok) + except Forbidden: + await logger.awarning( + "Missing delete permissions to delete %s.%s.%s", project_id, dataset_id, table_id + ) async def amerge_tables( self, @@ -274,6 +289,28 @@ async def amerge_tables( final_table, stage_table, merge_key=merge_key, stage_fields_cast_to_json=stage_fields_cast_to_json ) + async def acheck_for_query_permissions_on_table( + self, + table: bigquery.Table, + ): + """Attempt to SELECT from table to check for query permissions.""" + job_config = bigquery.QueryJobConfig() + if "timestamp" in [field.name for field in table.schema]: + query = f""" + SELECT 1 FROM `{table.full_table_id.replace(":", ".", 1)}` WHERE timestamp IS NOT NULL + """ + else: + query = f""" + SELECT 1 FROM `{table.full_table_id.replace(":", ".", 1)}` + """ + + try: + query_job = self.query(query, job_config=job_config) + await asyncio.to_thread(query_job.result) + except Forbidden: + return False + return True + async def ainsert_into_from_stage_table( self, into_table: bigquery.Table, @@ -289,7 +326,9 @@ async def ainsert_into_from_stage_table( else: fields_to_cast = set() stage_table_fields = ",".join( - f"PARSE_JSON(`{field.name}`)" if field.name in fields_to_cast else f"`{field.name}`" + f"PARSE_JSON(`{field.name}`, wide_number_mode=>'round')" + if field.name in fields_to_cast + else f"`{field.name}`" for field in into_table.schema ) @@ -339,7 +378,9 @@ async def amerge_person_tables( field_names += ", " stage_field = ( - f"PARSE_JSON(stage.`{field.name}`)" if field.name in fields_to_cast else f"stage.`{field.name}`" + f"PARSE_JSON(stage.`{field.name}`, wide_number_mode=>'round')" + if field.name in fields_to_cast + else f"stage.`{field.name}`" ) update_clause += f"final.`{field.name}` = {stage_field}" field_names += f"`{field.name}`" @@ -350,7 +391,17 @@ async def amerge_person_tables( merge_query = f""" MERGE `{final_table.full_table_id.replace(":", ".", 1)}` final - USING `{stage_table.full_table_id.replace(":", ".", 1)}` stage + USING ( + SELECT * FROM + ( + SELECT + *, + ROW_NUMBER() OVER (PARTITION BY {",".join(field.name for field in merge_key)}) row_num + FROM + `{stage_table.full_table_id.replace(":", ".", 1)}` + ) + WHERE row_num = 1 + ) stage {merge_condition} WHEN MATCHED AND (stage.`{person_version_key}` > final.`{person_version_key}` OR stage.`{person_distinct_id_version_key}` > final.`{person_distinct_id_version_key}`) THEN @@ -468,11 +519,12 @@ def __init__( heartbeater: Heartbeater, heartbeat_details: BigQueryHeartbeatDetails, data_interval_start: dt.datetime | str | None, + writer_format: WriterFormat, bigquery_client: BigQueryClient, bigquery_table: bigquery.Table, table_schema: list[BatchExportField], ): - super().__init__(heartbeater, heartbeat_details, data_interval_start) + super().__init__(heartbeater, heartbeat_details, data_interval_start, writer_format) self.bigquery_client = bigquery_client self.bigquery_table = 
bigquery_table self.table_schema = table_schema @@ -495,7 +547,10 @@ async def flush( self.bigquery_table, ) - await self.bigquery_client.load_parquet_file(batch_export_file, self.bigquery_table, self.table_schema) + if self.writer_format == WriterFormat.PARQUET: + await self.bigquery_client.load_parquet_file(batch_export_file, self.bigquery_table, self.table_schema) + else: + await self.bigquery_client.load_jsonl_file(batch_export_file, self.bigquery_table, self.table_schema) await self.logger.adebug("Loaded %s to BigQuery table '%s'", records_since_last_flush, self.bigquery_table) self.rows_exported_counter.add(records_since_last_flush) @@ -620,53 +675,62 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records stage_table_name = f"stage_{inputs.table_id}_{data_interval_end_str}" with bigquery_client(inputs) as bq_client: - async with ( - bq_client.managed_table( + async with bq_client.managed_table( + project_id=inputs.project_id, + dataset_id=inputs.dataset_id, + table_id=inputs.table_id, + table_schema=schema, + delete=False, + ) as bigquery_table: + can_perform_merge = await bq_client.acheck_for_query_permissions_on_table(bigquery_table) + + if not can_perform_merge: + if model_name == "persons": + raise MissingRequiredPermissionsError() + + await logger.awarning( + "Missing query permissions on BigQuery table required for merging, will attempt direct load into final table" + ) + + async with bq_client.managed_table( project_id=inputs.project_id, dataset_id=inputs.dataset_id, - table_id=inputs.table_id, - table_schema=schema, - delete=False, - ) as bigquery_table, - bq_client.managed_table( - project_id=inputs.project_id, - dataset_id=inputs.dataset_id, - table_id=stage_table_name, - table_schema=stage_schema, - create=True, - delete=True, - ) as bigquery_stage_table, - ): - records_completed = await run_consumer_loop( - queue=queue, - consumer_cls=BigQueryConsumer, - producer_task=producer_task, - heartbeater=heartbeater, - heartbeat_details=details, - data_interval_end=data_interval_end, - data_interval_start=data_interval_start, - schema=record_batch_schema, - writer_format=WriterFormat.PARQUET, - max_bytes=settings.BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES, - non_retryable_error_types=NON_RETRYABLE_ERROR_TYPES, - json_columns=(), - bigquery_client=bq_client, - bigquery_table=bigquery_stage_table, + table_id=stage_table_name if can_perform_merge else inputs.table_id, table_schema=stage_schema, - writer_file_kwargs={"compression": "zstd"}, - ) - - merge_key = ( - bigquery.SchemaField("team_id", "INT64"), - bigquery.SchemaField("distinct_id", "STRING"), - ) - await bq_client.amerge_tables( - final_table=bigquery_table, - stage_table=bigquery_stage_table, - mutable=True if model_name == "persons" else False, - merge_key=merge_key, - stage_fields_cast_to_json=json_columns, - ) + create=can_perform_merge, + delete=can_perform_merge, + ) as bigquery_stage_table: + records_completed = await run_consumer_loop( + queue=queue, + consumer_cls=BigQueryConsumer, + producer_task=producer_task, + heartbeater=heartbeater, + heartbeat_details=details, + data_interval_end=data_interval_end, + data_interval_start=data_interval_start, + schema=record_batch_schema, + writer_format=WriterFormat.PARQUET if can_perform_merge else WriterFormat.JSONL, + max_bytes=settings.BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES, + json_columns=() if can_perform_merge else json_columns, + bigquery_client=bq_client, + bigquery_table=bigquery_stage_table if can_perform_merge else 
bigquery_table, + table_schema=stage_schema if can_perform_merge else schema, + writer_file_kwargs={"compression": "zstd"} if can_perform_merge else {}, + multiple_files=True, + ) + + if can_perform_merge: + merge_key = ( + bigquery.SchemaField("team_id", "INT64"), + bigquery.SchemaField("distinct_id", "STRING"), + ) + await bq_client.amerge_tables( + final_table=bigquery_table, + stage_table=bigquery_stage_table, + mutable=True if model_name == "persons" else False, + merge_key=merge_key, + stage_fields_cast_to_json=json_columns, + ) return records_completed diff --git a/posthog/temporal/batch_exports/monitoring.py b/posthog/temporal/batch_exports/monitoring.py new file mode 100644 index 00000000000000..97eaf6c2430d90 --- /dev/null +++ b/posthog/temporal/batch_exports/monitoring.py @@ -0,0 +1,227 @@ +import datetime as dt +import json +from dataclasses import dataclass +from uuid import UUID + +from temporalio import activity, workflow +from temporalio.common import RetryPolicy + +from posthog.batch_exports.models import BatchExport +from posthog.batch_exports.service import aupdate_records_total_count +from posthog.batch_exports.sql import EVENT_COUNT_BY_INTERVAL +from posthog.temporal.batch_exports.base import PostHogWorkflow +from posthog.temporal.common.clickhouse import get_client +from posthog.temporal.common.heartbeat import Heartbeater + + +class BatchExportNotFoundError(Exception): + """Exception raised when batch export is not found.""" + + def __init__(self, batch_export_id: UUID): + super().__init__(f"Batch export with id {batch_export_id} not found") + + +class NoValidBatchExportsFoundError(Exception): + """Exception raised when no valid batch export is found.""" + + def __init__(self, message: str = "No valid batch exports found"): + super().__init__(message) + + +@dataclass +class BatchExportMonitoringInputs: + """Inputs for the BatchExportMonitoringWorkflow. + + Attributes: + batch_export_id: The batch export id to monitor. + """ + + batch_export_id: UUID + + +@dataclass +class BatchExportDetails: + id: UUID + team_id: int + interval: str + exclude_events: list[str] + include_events: list[str] + + +@activity.defn +async def get_batch_export(batch_export_id: UUID) -> BatchExportDetails: + """Fetch a batch export from the database and return its details.""" + batch_export = ( + await BatchExport.objects.filter(id=batch_export_id, model="events", paused=False, deleted=False) + .prefetch_related("destination") + .afirst() + ) + if batch_export is None: + raise BatchExportNotFoundError(batch_export_id) + if batch_export.deleted is True: + raise NoValidBatchExportsFoundError("Batch export has been deleted") + if batch_export.paused is True: + raise NoValidBatchExportsFoundError("Batch export is paused") + if batch_export.model != "events": + raise NoValidBatchExportsFoundError("Batch export model is not 'events'") + if batch_export.interval_time_delta != dt.timedelta(minutes=5): + raise NoValidBatchExportsFoundError( + "Only batch exports with interval of 5 minutes are supported for monitoring at this time." 
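+            # NB: `get_event_counts` below re-parses the human-readable interval
+            # string ("every 5 minutes"), so the two checks must stay in sync.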
+        )
+    config = batch_export.destination.config
+    return BatchExportDetails(
+        id=batch_export.id,
+        team_id=batch_export.team_id,
+        interval=batch_export.interval,
+        exclude_events=config.get("exclude_events", []),
+        include_events=config.get("include_events", []),
+    )
+
+
+@dataclass
+class GetEventCountsInputs:
+    team_id: int
+    interval: str
+    overall_interval_start: str
+    overall_interval_end: str
+    exclude_events: list[str]
+    include_events: list[str]
+
+
+@dataclass
+class EventCountsOutput:
+    interval_start: str
+    interval_end: str
+    count: int
+
+
+@dataclass
+class GetEventCountsOutputs:
+    results: list[EventCountsOutput]
+
+
+@activity.defn
+async def get_event_counts(inputs: GetEventCountsInputs) -> GetEventCountsOutputs:
+    """Get the total number of events for a given team over a set of time intervals."""
+
+    query = EVENT_COUNT_BY_INTERVAL
+
+    interval = inputs.interval
+    # The interval is validated in `get_batch_export`, but we double-check here.
+    if not interval.startswith("every 5 minutes"):
+        raise NoValidBatchExportsFoundError(
+            "Only intervals of 'every 5 minutes' are supported for monitoring at this time."
+        )
+    _, value, unit = interval.split(" ")
+    interval = f"{value} {unit}"
+
+    query_params = {
+        "team_id": inputs.team_id,
+        "interval": interval,
+        "overall_interval_start": inputs.overall_interval_start,
+        "overall_interval_end": inputs.overall_interval_end,
+        "include_events": inputs.include_events,
+        "exclude_events": inputs.exclude_events,
+    }
+    async with Heartbeater(), get_client() as client:
+        if not await client.is_alive():
+            raise ConnectionError("Cannot establish connection to ClickHouse")
+
+        response = await client.read_query(query, query_params)
+        results = []
+        for line in response.decode("utf-8").splitlines():
+            interval_start, interval_end, count = line.strip().split("\t")
+            results.append(
+                EventCountsOutput(interval_start=interval_start, interval_end=interval_end, count=int(count))
+            )
+
+        return GetEventCountsOutputs(results=results)
+
+
+@dataclass
+class UpdateBatchExportRunsInputs:
+    batch_export_id: UUID
+    results: list[EventCountsOutput]
+
+
+@activity.defn
+async def update_batch_export_runs(inputs: UpdateBatchExportRunsInputs) -> int:
+    """Update BatchExportRuns with the expected number of events."""
+
+    total_rows_updated = 0
+    async with Heartbeater():
+        for result in inputs.results:
+            total_rows_updated += await aupdate_records_total_count(
+                batch_export_id=inputs.batch_export_id,
+                interval_start=dt.datetime.strptime(result.interval_start, "%Y-%m-%d %H:%M:%S").replace(tzinfo=dt.UTC),
+                interval_end=dt.datetime.strptime(result.interval_end, "%Y-%m-%d %H:%M:%S").replace(tzinfo=dt.UTC),
+                count=result.count,
+            )
+    activity.logger.info(f"Updated {total_rows_updated} BatchExportRuns")
+    return total_rows_updated
+
+
+@workflow.defn(name="batch-export-monitoring")
+class BatchExportMonitoringWorkflow(PostHogWorkflow):
+    """Workflow to monitor batch exports.
+
+    We have had some issues with batch exports in the past, where some events
+    have been missing. The purpose of this workflow is to monitor the status of
+    batch exports for a given customer by reconciling the number of exported
+    events with the number of events in ClickHouse for a given interval.
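+
+    Rough usage sketch (the `temporal_client` and workflow id here are
+    hypothetical; see posthog/temporal/tests/batch_exports/test_monitoring.py
+    for a runnable version):
+
+        await temporal_client.execute_workflow(
+            BatchExportMonitoringWorkflow.run,
+            BatchExportMonitoringInputs(batch_export_id=batch_export.id),
+            id=str(uuid.uuid4()),
+            task_queue=constants.BATCH_EXPORTS_TASK_QUEUE,
+        )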
+ """ + + @staticmethod + def parse_inputs(inputs: list[str]) -> BatchExportMonitoringInputs: + """Parse inputs from the management command CLI.""" + loaded = json.loads(inputs[0]) + return BatchExportMonitoringInputs(**loaded) + + @workflow.run + async def run(self, inputs: BatchExportMonitoringInputs): + """Workflow implementation to monitor batch exports for a given team.""" + # TODO - check if this is the right way to do logging since there seems to be a few different ways + workflow.logger.info( + "Starting batch exports monitoring workflow for batch export id %s", inputs.batch_export_id + ) + + batch_export_details = await workflow.execute_activity( + get_batch_export, + inputs.batch_export_id, + start_to_close_timeout=dt.timedelta(minutes=1), + retry_policy=RetryPolicy( + initial_interval=dt.timedelta(seconds=20), + non_retryable_error_types=["BatchExportNotFoundError", "NoValidBatchExportsFoundError"], + ), + ) + + # time interval to check is not the previous hour but the hour before that + # (just to ensure all recent batch exports have run successfully) + now = dt.datetime.now(tz=dt.UTC) + interval_end = now.replace(minute=0, second=0, microsecond=0) - dt.timedelta(hours=1) + interval_start = interval_end - dt.timedelta(hours=1) + interval_end_str = interval_end.strftime("%Y-%m-%d %H:%M:%S") + interval_start_str = interval_start.strftime("%Y-%m-%d %H:%M:%S") + + total_events = await workflow.execute_activity( + get_event_counts, + GetEventCountsInputs( + team_id=batch_export_details.team_id, + interval=batch_export_details.interval, + overall_interval_start=interval_start_str, + overall_interval_end=interval_end_str, + exclude_events=batch_export_details.exclude_events, + include_events=batch_export_details.include_events, + ), + start_to_close_timeout=dt.timedelta(hours=1), + retry_policy=RetryPolicy(maximum_attempts=3, initial_interval=dt.timedelta(seconds=20)), + heartbeat_timeout=dt.timedelta(minutes=1), + ) + + return await workflow.execute_activity( + update_batch_export_runs, + UpdateBatchExportRunsInputs(batch_export_id=batch_export_details.id, results=total_events.results), + start_to_close_timeout=dt.timedelta(hours=1), + retry_policy=RetryPolicy(maximum_attempts=3, initial_interval=dt.timedelta(seconds=20)), + heartbeat_timeout=dt.timedelta(minutes=1), + ) diff --git a/posthog/temporal/batch_exports/s3_batch_export.py b/posthog/temporal/batch_exports/s3_batch_export.py index 7201af91d2b5a0..d6e95ee28fc222 100644 --- a/posthog/temporal/batch_exports/s3_batch_export.py +++ b/posthog/temporal/batch_exports/s3_batch_export.py @@ -51,9 +51,7 @@ BatchExportTemporaryFile, WriterFormat, ) -from posthog.temporal.batch_exports.utils import ( - set_status_to_running_task, -) +from posthog.temporal.batch_exports.utils import set_status_to_running_task from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.heartbeat import Heartbeater from posthog.temporal.common.logger import bind_temporal_worker_logger @@ -67,12 +65,12 @@ "NoSuchBucket", # Couldn't connect to custom S3 endpoint "EndpointConnectionError", - # Input contained an empty S3 endpoint URL - "EmptyS3EndpointURLError", # User provided an invalid S3 key "InvalidS3Key", # All consumers failed with non-retryable errors. 
"RecordBatchConsumerNonRetryableExceptionGroup", + # Invalid S3 endpoint URL + "InvalidS3EndpointError", ] FILE_FORMAT_EXTENSIONS = { @@ -159,11 +157,11 @@ def __init__(self, part_number: int): super().__init__(f"An intermittent `RequestTimeout` was raised while attempting to upload part {part_number}") -class EmptyS3EndpointURLError(Exception): - """Exception raised when an S3 endpoint URL is empty string.""" +class InvalidS3EndpointError(Exception): + """Exception raised when an S3 endpoint is invalid.""" - def __init__(self): - super().__init__("Endpoint URL cannot be empty.") + def __init__(self, message: str = "Endpoint URL is invalid."): + super().__init__(message) Part = dict[str, str | int] @@ -215,7 +213,7 @@ def __init__( self.pending_parts: list[Part] = [] if self.endpoint_url == "": - raise EmptyS3EndpointURLError() + raise InvalidS3EndpointError("Endpoint URL is empty.") def to_state(self) -> S3MultiPartUploadState: """Produce state tuple that can be used to resume this S3MultiPartUpload.""" @@ -240,14 +238,19 @@ def is_upload_in_progress(self) -> bool: async def s3_client(self): """Asynchronously yield an S3 client.""" - async with self._session.client( - "s3", - region_name=self.region_name, - aws_access_key_id=self.aws_access_key_id, - aws_secret_access_key=self.aws_secret_access_key, - endpoint_url=self.endpoint_url, - ) as client: - yield client + try: + async with self._session.client( + "s3", + region_name=self.region_name, + aws_access_key_id=self.aws_access_key_id, + aws_secret_access_key=self.aws_secret_access_key, + endpoint_url=self.endpoint_url, + ) as client: + yield client + except ValueError as err: + if "Invalid endpoint" in str(err): + raise InvalidS3EndpointError(str(err)) from err + raise async def start(self) -> str: """Start this S3MultiPartUpload.""" @@ -466,9 +469,10 @@ def __init__( heartbeater: Heartbeater, heartbeat_details: S3HeartbeatDetails, data_interval_start: dt.datetime | str | None, + writer_format: WriterFormat, s3_upload: S3MultiPartUpload, ): - super().__init__(heartbeater, heartbeat_details, data_interval_start) + super().__init__(heartbeater, heartbeat_details, data_interval_start, writer_format) self.heartbeat_details: S3HeartbeatDetails = heartbeat_details self.s3_upload = s3_upload @@ -712,7 +716,6 @@ async def insert_into_s3_activity(inputs: S3InsertInputs) -> RecordsCompleted: max_bytes=settings.BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES, s3_upload=s3_upload, writer_file_kwargs={"compression": inputs.compression}, - non_retryable_error_types=NON_RETRYABLE_ERROR_TYPES, ) await s3_upload.complete() diff --git a/posthog/temporal/batch_exports/spmc.py b/posthog/temporal/batch_exports/spmc.py index c285920d966d16..253935656b1e77 100644 --- a/posthog/temporal/batch_exports/spmc.py +++ b/posthog/temporal/batch_exports/spmc.py @@ -175,11 +175,13 @@ def __init__( heartbeater: Heartbeater, heartbeat_details: BatchExportRangeHeartbeatDetails, data_interval_start: dt.datetime | str | None, + writer_format: WriterFormat, ): self.flush_start_event = asyncio.Event() self.heartbeater = heartbeater self.heartbeat_details = heartbeat_details self.data_interval_start = data_interval_start + self.writer_format = writer_format self.logger = logger @property @@ -223,10 +225,10 @@ async def start( self, queue: RecordBatchQueue, producer_task: asyncio.Task, - writer_format: WriterFormat, max_bytes: int, schema: pa.Schema, json_columns: collections.abc.Sequence[str], + multiple_files: bool = False, **kwargs, ) -> int: """Start consuming record batches from 
queue. @@ -234,43 +236,73 @@ async def start( Record batches will be written to a temporary file defined by `writer_format` and the file will be flushed upon reaching at least `max_bytes`. + Callers can control whether a new file is created for each flush or whether we + continue flushing to the same file by setting `multiple_files`. File data is + reset regardless, so this is not meant to impact total file size, but rather + to control whether we are exporting a single large file in multiple parts, or + multiple files that must each individually be valid. + Returns: Total number of records in all consumed record batches. """ await logger.adebug("Starting record batch consumer") schema = cast_record_batch_schema_json_columns(schema, json_columns=json_columns) - writer = get_batch_export_writer(writer_format, self.flush, schema=schema, max_bytes=max_bytes, **kwargs) + writer = get_batch_export_writer(self.writer_format, self.flush, schema=schema, max_bytes=max_bytes, **kwargs) record_batches_count = 0 + records_count = 0 await self.logger.adebug("Starting record batch writing loop") - async with writer.open_temporary_file(): - while True: - try: - record_batch = queue.get_nowait() - record_batches_count += 1 - except asyncio.QueueEmpty: - if producer_task.done(): - await self.logger.adebug( - "Empty queue with no more events being produced, closing writer loop and flushing" - ) - self.flush_start_event.set() - # Exit context manager to trigger final flush - break - else: - await asyncio.sleep(0) - continue - - record_batch = cast_record_batch_json_columns(record_batch, json_columns=json_columns) - await writer.write_record_batch(record_batch, flush=True) - - for _ in range(record_batches_count): - queue.task_done() - - await self.logger.adebug("Consumed %s records", writer.records_total) + + writer._batch_export_file = await asyncio.to_thread(writer.create_temporary_file) + + async for record_batch in self.generate_record_batches_from_queue(queue, producer_task): + record_batches_count += 1 + record_batch = cast_record_batch_json_columns(record_batch, json_columns=json_columns) + + await writer.write_record_batch(record_batch, flush=False) + + if writer.should_flush(): + records_count += writer.records_since_last_flush + + if multiple_files: + await writer.close_temporary_file() + writer._batch_export_file = await asyncio.to_thread(writer.create_temporary_file) + else: + await writer.flush() + + for _ in range(record_batches_count): + queue.task_done() + record_batches_count = 0 + + records_count += writer.records_since_last_flush + await writer.close_temporary_file() + + await self.logger.adebug("Consumed %s records", records_count) self.heartbeater.set_from_heartbeat_details(self.heartbeat_details) - return writer.records_total + return records_count + + async def generate_record_batches_from_queue( + self, + queue: RecordBatchQueue, + producer_task: asyncio.Task, + ): + """Yield record batches from provided `queue` until `producer_task` is done.""" + while True: + try: + record_batch = queue.get_nowait() + except asyncio.QueueEmpty: + if producer_task.done(): + await self.logger.adebug( + "Empty queue with no more events being produced, closing writer loop and flushing" + ) + break + else: + await asyncio.sleep(0) + continue + + yield record_batch class RecordBatchConsumerRetryableExceptionGroup(ExceptionGroup): @@ -300,7 +332,7 @@ async def run_consumer_loop( max_bytes: int, json_columns: collections.abc.Sequence[str] = ("properties", "person_properties", "set", "set_once"), 
writer_file_kwargs: collections.abc.Mapping[str, typing.Any] | None = None, - non_retryable_error_types: collections.abc.Sequence[str] = (), + multiple_files: bool = False, **kwargs, ) -> int: """Run record batch consumers in a loop. @@ -339,15 +371,15 @@ def consumer_done_callback(task: asyncio.Task): await logger.adebug("Starting record batch consumer loop") - consumer = consumer_cls(heartbeater, heartbeat_details, data_interval_start, **kwargs) + consumer = consumer_cls(heartbeater, heartbeat_details, data_interval_start, writer_format, **kwargs) consumer_task = asyncio.create_task( consumer.start( queue=queue, producer_task=producer_task, - writer_format=writer_format, max_bytes=max_bytes, schema=schema, json_columns=json_columns, + multiple_files=multiple_files, **writer_file_kwargs or {}, ), name=f"record_batch_consumer_{consumer_number}", diff --git a/posthog/temporal/batch_exports/temporary_file.py b/posthog/temporal/batch_exports/temporary_file.py index 4da99e74a273e5..afe91d42412a3f 100644 --- a/posthog/temporal/batch_exports/temporary_file.py +++ b/posthog/temporal/batch_exports/temporary_file.py @@ -350,27 +350,33 @@ async def open_temporary_file(self, current_flush_counter: int = 0): self.reset_writer_tracking() self.flush_counter = current_flush_counter - with BatchExportTemporaryFile(**self.file_kwargs) as temp_file: + with self.create_temporary_file() as temp_file: self._batch_export_file = temp_file try: - yield + yield self except Exception as temp_err: self.error = temp_err raise finally: - self.track_bytes_written(temp_file) + await self.close_temporary_file() - if self.bytes_since_last_flush > 0: - # `bytes_since_last_flush` should be 0 unless: - # 1. The last batch wasn't flushed as it didn't reach `max_bytes`. - # 2. The last batch was flushed but there was another write after the last call to - # `write_record_batch`. For example, footer bytes. - await self.flush(is_last=True) + async def close_temporary_file(self): + self.track_bytes_written(self.batch_export_file) + + if self.bytes_since_last_flush > 0: + # `bytes_since_last_flush` should be 0 unless: + # 1. The last batch wasn't flushed as it didn't reach `max_bytes`. + # 2. The last batch was flushed but there was another write after the last call to + # `write_record_batch`. For example, footer bytes. 
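+            # 3. A consumer running with `multiple_files=True` (see spmc.py) calls
+            #    `close_temporary_file()` at each flush boundary instead of `flush()`,
+            #    so any pending bytes for that file are flushed here.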
+ await self.flush(is_last=True) - self._batch_export_file = None + self._batch_export_file = None + + def create_temporary_file(self) -> BatchExportTemporaryFile: + return BatchExportTemporaryFile(**self.file_kwargs) @property def batch_export_file(self): @@ -686,16 +692,13 @@ def parquet_writer(self) -> pq.ParquetWriter: ) return self._parquet_writer - @contextlib.asynccontextmanager - async def open_temporary_file(self, current_flush_counter: int = 0): + async def close_temporary_file(self): """Ensure underlying Parquet writer is closed before flushing and closing temporary file.""" - async with super().open_temporary_file(current_flush_counter): - try: - yield - finally: - if self._parquet_writer is not None: - self._parquet_writer.writer.close() - self._parquet_writer = None + if self._parquet_writer is not None: + self._parquet_writer.writer.close() + self._parquet_writer = None + + await super().close_temporary_file() def _write_record_batch(self, record_batch: pa.RecordBatch) -> None: """Write records to a temporary file as Parquet.""" diff --git a/posthog/temporal/common/clickhouse.py b/posthog/temporal/common/clickhouse.py index 0cd0d909f21c95..f9c9516da907de 100644 --- a/posthog/temporal/common/clickhouse.py +++ b/posthog/temporal/common/clickhouse.py @@ -6,11 +6,11 @@ import ssl import typing import uuid -import structlog import aiohttp import pyarrow as pa import requests +import structlog from django.conf import settings import posthog.temporal.common.asyncpa as asyncpa @@ -511,6 +511,7 @@ async def get_client( max_memory_usage=settings.CLICKHOUSE_MAX_MEMORY_USAGE, max_block_size=max_block_size, output_format_arrow_string_as_string="true", + http_send_timeout=0, **kwargs, ) as client: yield client diff --git a/posthog/temporal/tests/batch_exports/conftest.py b/posthog/temporal/tests/batch_exports/conftest.py index 67c321205a14f5..7044d8fe968680 100644 --- a/posthog/temporal/tests/batch_exports/conftest.py +++ b/posthog/temporal/tests/batch_exports/conftest.py @@ -152,8 +152,8 @@ async def create_clickhouse_tables_and_views(clickhouse_client, django_db_setup) from posthog.batch_exports.sql import ( CREATE_EVENTS_BATCH_EXPORT_VIEW, CREATE_EVENTS_BATCH_EXPORT_VIEW_BACKFILL, - CREATE_EVENTS_BATCH_EXPORT_VIEW_UNBOUNDED, CREATE_EVENTS_BATCH_EXPORT_VIEW_RECENT, + CREATE_EVENTS_BATCH_EXPORT_VIEW_UNBOUNDED, CREATE_PERSONS_BATCH_EXPORT_VIEW, CREATE_PERSONS_BATCH_EXPORT_VIEW_BACKFILL, ) @@ -211,8 +211,12 @@ def data_interval_start(request, data_interval_end, interval): @pytest.fixture -def data_interval_end(interval): +def data_interval_end(request, interval): """Set a test data interval end.""" + try: + return request.param + except AttributeError: + pass return dt.datetime(2023, 4, 25, 15, 0, 0, tzinfo=dt.UTC) diff --git a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py index b4ec82c7eacfca..0fe8f01cd2366a 100644 --- a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py @@ -4,12 +4,14 @@ import operator import os import typing +import unittest.mock import uuid import warnings import pyarrow as pa import pytest import pytest_asyncio +from django.test import override_settings from freezegun.api import freeze_time from google.cloud import bigquery from temporalio import activity @@ -361,11 +363,86 @@ async def test_insert_into_bigquery_activity_inserts_data_into_bigquery_table( 
**bigquery_config, ) - with freeze_time(TEST_TIME) as frozen_time: + with freeze_time(TEST_TIME) as frozen_time, override_settings(BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES=1): + await activity_environment.run(insert_into_bigquery_activity, insert_inputs) + + ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) + + await assert_clickhouse_records_in_bigquery( + bigquery_client=bigquery_client, + clickhouse_client=clickhouse_client, + table_id=f"test_insert_activity_table_{ateam.pk}", + dataset_id=bigquery_dataset.dataset_id, + team_id=ateam.pk, + date_ranges=[(data_interval_start, data_interval_end)], + exclude_events=exclude_events, + include_events=None, + batch_export_model=model, + use_json_type=use_json_type, + min_ingested_timestamp=ingested_timestamp, + sort_key="person_id" + if batch_export_model is not None and batch_export_model.name == "persons" + else "event", + ) + + +@pytest.mark.parametrize("use_json_type", [True], indirect=True) +@pytest.mark.parametrize("model", TEST_MODELS) +async def test_insert_into_bigquery_activity_inserts_data_into_bigquery_table_without_query_permissions( + clickhouse_client, + activity_environment, + bigquery_client, + bigquery_config, + exclude_events, + bigquery_dataset, + use_json_type, + model: BatchExportModel | BatchExportSchema | None, + generate_test_data, + data_interval_start, + data_interval_end, + ateam, +): + """Test that the `insert_into_bigquery_activity` function inserts data into a BigQuery table. + + For this test we mock the `acheck_for_query_permissions_on_table` method to assert the + behavior of the activity function when lacking query permissions in BigQuery. + """ + if isinstance(model, BatchExportModel) and model.name == "persons": + pytest.skip("Unnecessary test case as person batch export requires query permissions") + + batch_export_schema: BatchExportSchema | None = None + batch_export_model: BatchExportModel | None = None + if isinstance(model, BatchExportModel): + batch_export_model = model + elif model is not None: + batch_export_schema = model + + insert_inputs = BigQueryInsertInputs( + team_id=ateam.pk, + table_id=f"test_insert_activity_table_{ateam.pk}", + dataset_id=bigquery_dataset.dataset_id, + data_interval_start=data_interval_start.isoformat(), + data_interval_end=data_interval_end.isoformat(), + exclude_events=exclude_events, + use_json_type=use_json_type, + batch_export_schema=batch_export_schema, + batch_export_model=batch_export_model, + **bigquery_config, + ) + + with ( + freeze_time(TEST_TIME) as frozen_time, + override_settings(BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES=1), + unittest.mock.patch( + "posthog.temporal.batch_exports.bigquery_batch_export.BigQueryClient.acheck_for_query_permissions_on_table", + return_value=False, + ) as mocked_check, + ): await activity_environment.run(insert_into_bigquery_activity, insert_inputs) ingested_timestamp = frozen_time().replace(tzinfo=dt.UTC) + mocked_check.assert_called_once() await assert_clickhouse_records_in_bigquery( bigquery_client=bigquery_client, clickhouse_client=clickhouse_client, diff --git a/posthog/temporal/tests/batch_exports/test_monitoring.py b/posthog/temporal/tests/batch_exports/test_monitoring.py new file mode 100644 index 00000000000000..cab50c25d3177a --- /dev/null +++ b/posthog/temporal/tests/batch_exports/test_monitoring.py @@ -0,0 +1,201 @@ +import datetime as dt +import uuid + +import pytest +import pytest_asyncio +from temporalio.common import RetryPolicy +from temporalio.testing import WorkflowEnvironment +from 
temporalio.worker import UnsandboxedWorkflowRunner, Worker + +from posthog import constants +from posthog.batch_exports.models import BatchExportRun +from posthog.temporal.batch_exports.monitoring import ( + BatchExportMonitoringInputs, + BatchExportMonitoringWorkflow, + get_batch_export, + get_event_counts, + update_batch_export_runs, +) +from posthog.temporal.tests.utils.models import ( + acreate_batch_export, + adelete_batch_export, + afetch_batch_export_runs, +) + +pytestmark = [pytest.mark.asyncio, pytest.mark.django_db] + +GENERATE_TEST_DATA_END = dt.datetime.now(tz=dt.UTC).replace( + minute=0, second=0, microsecond=0, tzinfo=dt.UTC +) - dt.timedelta(hours=1) +GENERATE_TEST_DATA_START = GENERATE_TEST_DATA_END - dt.timedelta(hours=1) + + +@pytest_asyncio.fixture +async def batch_export(ateam, temporal_client): + """Provide a batch export for tests, not intended to be used.""" + destination_data = { + "type": "S3", + "config": { + "bucket_name": "a-bucket", + "region": "us-east-1", + "prefix": "a-key", + "aws_access_key_id": "object_storage_root_user", + "aws_secret_access_key": "object_storage_root_password", + }, + } + + batch_export_data = { + "name": "my-production-s3-bucket-destination", + "destination": destination_data, + "interval": "every 5 minutes", + } + + batch_export = await acreate_batch_export( + team_id=ateam.pk, + name=batch_export_data["name"], # type: ignore + destination_data=batch_export_data["destination"], # type: ignore + interval=batch_export_data["interval"], # type: ignore + ) + + yield batch_export + + await adelete_batch_export(batch_export, temporal_client) + + +@pytest_asyncio.fixture +async def generate_batch_export_runs( + generate_test_data, + data_interval_start: dt.datetime, + data_interval_end: dt.datetime, + interval: str, + batch_export, +): + # to keep things simple for now, we assume 5 min interval + if interval != "every 5 minutes": + raise NotImplementedError("Only 5 minute intervals are supported for now. 
Please update the test.") + + events_created, _ = generate_test_data + + batch_export_runs: list[BatchExportRun] = [] + interval_start = data_interval_start + interval_end = interval_start + dt.timedelta(minutes=5) + while interval_end <= data_interval_end: + run = BatchExportRun( + batch_export_id=batch_export.id, + data_interval_start=interval_start, + data_interval_end=interval_end, + status="completed", + records_completed=len( + [ + e + for e in events_created + if interval_start + <= dt.datetime.fromisoformat(e["inserted_at"]).replace(tzinfo=dt.UTC) + < interval_end + ] + ), + ) + await run.asave() + batch_export_runs.append(run) + interval_start = interval_end + interval_end += dt.timedelta(minutes=5) + + yield + + for run in batch_export_runs: + await run.adelete() + + +async def test_monitoring_workflow_when_no_event_data(batch_export): + workflow_id = str(uuid.uuid4()) + inputs = BatchExportMonitoringInputs(batch_export_id=batch_export.id) + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + # TODO - not sure if this is the right task queue + task_queue=constants.BATCH_EXPORTS_TASK_QUEUE, + workflows=[BatchExportMonitoringWorkflow], + activities=[ + get_batch_export, + get_event_counts, + update_batch_export_runs, + ], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + batch_export_runs_updated = await activity_environment.client.execute_workflow( + BatchExportMonitoringWorkflow.run, + inputs, + id=workflow_id, + task_queue=constants.BATCH_EXPORTS_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + execution_timeout=dt.timedelta(seconds=30), + ) + assert batch_export_runs_updated == 0 + + +@pytest.mark.parametrize( + "data_interval_start", + [GENERATE_TEST_DATA_START], + indirect=True, +) +@pytest.mark.parametrize( + "data_interval_end", + [GENERATE_TEST_DATA_END], + indirect=True, +) +@pytest.mark.parametrize( + "interval", + ["every 5 minutes"], + indirect=True, +) +async def test_monitoring_workflow( + batch_export, + generate_test_data, + data_interval_start, + data_interval_end, + interval, + generate_batch_export_runs, +): + """Test the monitoring workflow with a batch export that has data. + + We generate 2 hours of data between 13:00 and 15:00, and then run the + monitoring workflow at 15:30. The monitoring workflow should check the data + between 14:00 and 15:00, and update the batch export runs. + + We generate some dummy batch export runs based on the event data we + generated and assert that the expected records count matches the records + completed. 
+ """ + workflow_id = str(uuid.uuid4()) + inputs = BatchExportMonitoringInputs(batch_export_id=batch_export.id) + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + # TODO - not sure if this is the right task queue + task_queue=constants.BATCH_EXPORTS_TASK_QUEUE, + workflows=[BatchExportMonitoringWorkflow], + activities=[ + get_batch_export, + get_event_counts, + update_batch_export_runs, + ], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + await activity_environment.client.execute_workflow( + BatchExportMonitoringWorkflow.run, + inputs, + id=workflow_id, + task_queue=constants.BATCH_EXPORTS_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + execution_timeout=dt.timedelta(seconds=30), + ) + + batch_export_runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + + for run in batch_export_runs: + if run.records_completed == 0: + # TODO: in the actual monitoring activity it would be better to + # update the actual count to 0 rather than None + assert run.records_total_count is None + else: + assert run.records_completed == run.records_total_count diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py index dc0de17e53d581..76a3c20599518e 100644 --- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py @@ -29,6 +29,7 @@ from posthog.temporal.batch_exports.s3_batch_export import ( FILE_FORMAT_EXTENSIONS, IntermittentUploadPartTimeoutError, + InvalidS3EndpointError, S3BatchExportInputs, S3BatchExportWorkflow, S3HeartbeatDetails, @@ -40,9 +41,7 @@ ) from posthog.temporal.common.clickhouse import ClickHouseClient from posthog.temporal.tests.batch_exports.utils import mocked_start_batch_export_run -from posthog.temporal.tests.utils.events import ( - generate_test_events_in_clickhouse, -) +from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse from posthog.temporal.tests.utils.models import ( acreate_batch_export, adelete_batch_export, @@ -1576,6 +1575,23 @@ async def client(self, *args, **kwargs): await s3_upload.upload_part(io.BytesIO(b"1010"), rewind=False) # type: ignore +async def test_s3_multi_part_upload_raises_exception_if_invalid_endpoint(bucket_name, s3_key_prefix): + """Test a InvalidS3EndpointError is raised if the endpoint is invalid.""" + s3_upload = S3MultiPartUpload( + bucket_name=bucket_name, + key=s3_key_prefix, + encryption=None, + kms_key_id=None, + region_name="us-east-1", + aws_access_key_id="object_storage_root_user", + aws_secret_access_key="object_storage_root_password", + endpoint_url="some-invalid-endpoint", + ) + + with pytest.raises(InvalidS3EndpointError): + await s3_upload.start() + + @pytest.mark.parametrize("model", [TEST_S3_MODELS[1], TEST_S3_MODELS[2], None]) async def test_s3_export_workflow_with_request_timeouts( clickhouse_client, diff --git a/posthog/test/__snapshots__/test_feature_flag.ambr b/posthog/test/__snapshots__/test_feature_flag.ambr index 2a478be1ac0ddb..26e3b9acfa02a9 100644 --- a/posthog/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/test/__snapshots__/test_feature_flag.ambr @@ -490,9 +490,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" 
+ "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -859,9 +924,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", 
+ "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -905,6 +1035,31 @@ AND "posthog_person"."team_id" = 99999) ''' # --- +# name: TestFeatureFlagMatcher.test_db_matches_independent_of_string_or_number_type.28 + ''' + SELECT ((("posthog_person"."properties" -> 'Distinct Id') IN ('"307"'::jsonb) + OR ("posthog_person"."properties" -> 'Distinct Id') IN ('307'::jsonb)) + AND "posthog_person"."properties" ? 'Distinct Id' + AND NOT (("posthog_person"."properties" -> 'Distinct Id') = 'null'::jsonb)) AS "flag_X_condition_0" + FROM "posthog_person" + INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") + WHERE ("posthog_persondistinctid"."distinct_id" = '307' + AND "posthog_persondistinctid"."team_id" = 99999 + AND "posthog_person"."team_id" = 99999) + ''' +# --- +# name: TestFeatureFlagMatcher.test_db_matches_independent_of_string_or_number_type.29 + ''' + SELECT (("posthog_person"."properties" -> 'Distinct Id') IN ('307'::jsonb) + AND "posthog_person"."properties" ? 
'Distinct Id' + AND NOT (("posthog_person"."properties" -> 'Distinct Id') = 'null'::jsonb)) AS "flag_X_condition_0" + FROM "posthog_person" + INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") + WHERE ("posthog_persondistinctid"."distinct_id" = '307' + AND "posthog_persondistinctid"."team_id" = 99999 + AND "posthog_person"."team_id" = 99999) + ''' +# --- # name: TestFeatureFlagMatcher.test_db_matches_independent_of_string_or_number_type.3 ''' SELECT "posthog_remoteconfig"."id", @@ -917,6 +1072,18 @@ LIMIT 21 ''' # --- +# name: TestFeatureFlagMatcher.test_db_matches_independent_of_string_or_number_type.30 + ''' + SELECT (("posthog_person"."properties" -> 'Distinct Id') = '307'::jsonb + AND "posthog_person"."properties" ? 'Distinct Id' + AND NOT (("posthog_person"."properties" -> 'Distinct Id') = 'null'::jsonb)) AS "flag_X_condition_0" + FROM "posthog_person" + INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") + WHERE ("posthog_persondistinctid"."distinct_id" = '307' + AND "posthog_persondistinctid"."team_id" = 99999 + AND "posthog_person"."team_id" = 99999) + ''' +# --- # name: TestFeatureFlagMatcher.test_db_matches_independent_of_string_or_number_type.4 ''' SELECT "posthog_team"."id", @@ -1116,9 +1283,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + 
"posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1253,6 +1485,18 @@ AND "posthog_person"."team_id" = 99999) ''' # --- +# name: TestFeatureFlagMatcher.test_invalid_regex_match_flag.11 + ''' + SELECT (("posthog_person"."properties" ->> 'email')::text ~ '["neil@x.com"]' + AND "posthog_person"."properties" ? 'email' + AND NOT (("posthog_person"."properties" -> 'email') = 'null'::jsonb)) AS "flag_X_condition_0" + FROM "posthog_person" + INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") + WHERE ("posthog_persondistinctid"."distinct_id" = 'another_id' + AND "posthog_persondistinctid"."team_id" = 99999 + AND "posthog_person"."team_id" = 99999) + ''' +# --- # name: TestFeatureFlagMatcher.test_invalid_regex_match_flag.2 ''' SELECT "posthog_team"."id", @@ -1527,9 +1771,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + 
"posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', @@ -1905,6 +2214,43 @@ AND "posthog_person"."team_id" = 99999) ''' # --- +# name: TestFeatureFlagMatcher.test_with_sql_injection_properties_and_other_aliases.11 + ''' + SELECT (((("posthog_person"."properties" -> 'number space') > '"100"'::jsonb + AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number space')) = ('string')) + OR (("posthog_person"."properties" -> 'number space') > '100.0'::jsonb + AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number space')) = ('number'))) + AND "posthog_person"."properties" ? 'number space' + AND NOT (("posthog_person"."properties" -> 'number space') = 'null'::jsonb) + AND ((JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = ('string') + AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '"100"'::jsonb) + OR (JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = ('number') + AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '100.0'::jsonb)) + AND "posthog_person"."properties" ? ';''" SELECT 1; DROP TABLE posthog_featureflag;' + AND NOT (("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') = 'null'::jsonb)) AS "flag_X_condition_0", + (((JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = ('string') + AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '"100"'::jsonb) + OR (JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = ('number') + AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '100.0'::jsonb)) + AND "posthog_person"."properties" ? 
';''" SELECT 1; DROP TABLE posthog_featureflag;' + AND NOT (("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') = 'null'::jsonb)) AS "flag_X_condition_1", + (((("posthog_person"."properties" -> 'version!!!') > '"1.05"'::jsonb + AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version!!!')) = ('string')) + OR (("posthog_person"."properties" -> 'version!!!') > '1.05'::jsonb + AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version!!!')) = ('number'))) + AND "posthog_person"."properties" ? 'version!!!' + AND NOT (("posthog_person"."properties" -> 'version!!!') = 'null'::jsonb)) AS "flag_X_condition_2", + ((("posthog_person"."properties" -> 'nested_prop --random #comment //test') = '"21"'::jsonb + OR ("posthog_person"."properties" -> 'nested_prop --random #comment //test') = '21'::jsonb) + AND "posthog_person"."properties" ? 'nested_prop --random #comment //test' + AND NOT (("posthog_person"."properties" -> 'nested_prop --random #comment //test') = 'null'::jsonb)) AS "flag_X_condition_3" + FROM "posthog_person" + INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") + WHERE ("posthog_persondistinctid"."distinct_id" = '307' + AND "posthog_persondistinctid"."team_id" = 99999 + AND "posthog_person"."team_id" = 99999) + ''' +# --- # name: TestFeatureFlagMatcher.test_with_sql_injection_properties_and_other_aliases.2 ''' SELECT "posthog_team"."id", @@ -2179,9 +2525,74 @@ "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", "posthog_hogfunction"."filters", + "posthog_hogfunction"."mappings", "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id" + "posthog_hogfunction"."template_id", + "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + 
"posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") WHERE ("posthog_hogfunction"."enabled" AND "posthog_hogfunction"."team_id" = 99999 AND "posthog_hogfunction"."type" IN ('site_destination', diff --git a/posthog/test/test_middleware.py b/posthog/test/test_middleware.py index e6a9e95ac9ba01..2d987bc2795e11 100644 --- a/posthog/test/test_middleware.py +++ b/posthog/test/test_middleware.py @@ -501,7 +501,8 @@ def test_logout(self): self.assertNotIn("ph_current_project_name", response.cookies) -@override_settings(IMPERSONATION_TIMEOUT_SECONDS=30) +@override_settings(IMPERSONATION_TIMEOUT_SECONDS=100) +@override_settings(IMPERSONATION_IDLE_TIMEOUT_SECONDS=20) class TestAutoLogoutImpersonateMiddleware(APIBaseTest): other_user: User @@ -538,21 +539,65 @@ def test_not_staff_user_cannot_login(self): assert response.status_code == 200 assert self.client.get("/api/users/@me").json()["email"] == self.user.email - def test_after_timeout_api_requests_401(self): - now = datetime.now() + def test_after_idle_timeout_api_requests_401(self): + now = datetime(2024, 1, 1, 12, 0, 0) with freeze_time(now): self.login_as_other_user() res = self.client.get("/api/users/@me") assert res.status_code == 200 assert res.json()["email"] == "other-user@posthog.com" + assert res.json()["is_impersonated_until"] == "2024-01-01T12:00:20+00:00" assert self.client.session.get("session_created_at") == now.timestamp() - with freeze_time(now + timedelta(seconds=10)): + # Move forward by 19 + now = now + timedelta(seconds=19) + with freeze_time(now): res = self.client.get("/api/users/@me") assert res.status_code == 200 assert res.json()["email"] == "other-user@posthog.com" + assert res.json()["is_impersonated_until"] == "2024-01-01T12:00:39+00:00" - with freeze_time(now + timedelta(seconds=35)): + # Past idle timeout + now = now + timedelta(seconds=21) + + with freeze_time(now): + res = self.client.get("/api/users/@me") + assert res.status_code == 401 + + def test_after_total_timeout_api_requests_401(self): + now = datetime(2024, 1, 1, 12, 0, 0) + with freeze_time(now): + self.login_as_other_user() + res = self.client.get("/api/users/@me") + assert res.status_code == 200 + assert res.json()["email"] == "other-user@posthog.com" + assert res.json()["is_impersonated_until"] == "2024-01-01T12:00:20+00:00" + assert self.client.session.get("session_created_at") == now.timestamp() + + for _ in range(4): + # Move forward by 19 seconds 4 times for a total of 76 seconds + now = now + timedelta(seconds=19) + with freeze_time(now): + res = self.client.get("/api/users/@me") + assert res.status_code == 200 + assert res.json()["email"] == "other-user@posthog.com" + # Format 
exactly like the date above + assert res.json()["is_impersonated_until"] == (now + timedelta(seconds=20)).strftime( + "%Y-%m-%dT%H:%M:%S+00:00" + ) + + now = now + timedelta(seconds=19) + with freeze_time(now): + res = self.client.get("/api/users/@me") + assert res.status_code == 200 + assert res.json()["email"] == "other-user@posthog.com" + # Even though below the idle timeout, we now see the total timeout as that is earlier + assert res.json()["is_impersonated_until"] == "2024-01-01T12:01:40+00:00" + + # Now even less than the idle time will take us past the total timeout + now = now + timedelta(seconds=10) + + with freeze_time(now): res = self.client.get("/api/users/@me") assert res.status_code == 401 diff --git a/posthog/udf_versioner.py b/posthog/udf_versioner.py index 4b9554dba90a70..50011d1d94c5ba 100644 --- a/posthog/udf_versioner.py +++ b/posthog/udf_versioner.py @@ -13,10 +13,10 @@ # 4. Land a version of the posthog repo with the updated `user_scripts` folder from the new branch (make sure this PR doesn't include changes to this file with the new version) # 5. Run the `copy_udfs_to_clickhouse` action in the `posthog_cloud_infra` repo to deploy the `user_scripts` folder to clickhouse # 6. After that deploy goes out, it is safe to land and deploy the full changes to the `posthog` repo -UDF_VERSION = 3 # Last modified by: @aspicer, 2024-10-30 +UDF_VERSION = 4 # Last modified by: @aspicer, 2024-12-6 # Clean up all versions less than this -EARLIEST_UDF_VERSION = 2 +EARLIEST_UDF_VERSION = 3 CLICKHOUSE_XML_FILENAME = "user_defined_function.xml" ACTIVE_XML_CONFIG = "../../docker/clickhouse/user_defined_function.xml" diff --git a/posthog/user_scripts/aggregate_funnel_aarch64 b/posthog/user_scripts/aggregate_funnel_aarch64 index 6c490d2e58b869..470c0e61a820b5 100755 Binary files a/posthog/user_scripts/aggregate_funnel_aarch64 and b/posthog/user_scripts/aggregate_funnel_aarch64 differ diff --git a/posthog/user_scripts/aggregate_funnel_x86_64 b/posthog/user_scripts/aggregate_funnel_x86_64 index 9570bf6d55e29e..fed097f05fe0ad 100755 Binary files a/posthog/user_scripts/aggregate_funnel_x86_64 and b/posthog/user_scripts/aggregate_funnel_x86_64 differ diff --git a/posthog/user_scripts/latest_user_defined_function.xml b/posthog/user_scripts/latest_user_defined_function.xml index a7da950ae23e28..95af85d63412f1 100644 --- a/posthog/user_scripts/latest_user_defined_function.xml +++ b/posthog/user_scripts/latest_user_defined_function.xml @@ -1,8 +1,8 @@ - executable_pool - aggregate_funnel_v2 + aggregate_funnel_v3 Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) result @@ -30,13 +30,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v2/aggregate_funnel steps + v3/aggregate_funnel steps 600 executable_pool - aggregate_funnel_cohort_v2 + aggregate_funnel_cohort_v3 Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) result @@ -64,13 +64,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v2/aggregate_funnel steps + v3/aggregate_funnel steps 600 executable_pool - aggregate_funnel_array_v2 + aggregate_funnel_array_v3 Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) result @@ -98,13 +98,13 @@ This file is autogenerated by udf_versioner.py. 
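Returning to the impersonation middleware tests above: the asserted `is_impersonated_until` values follow from taking whichever of the two deadlines expires first, the rolling idle deadline or the absolute total deadline. A small sketch of that arithmetic under the settings used in the tests (the helper name is illustrative; the middleware implementation itself is not part of this diff):

from datetime import datetime, timedelta

IMPERSONATION_TIMEOUT_SECONDS = 100  # absolute cap, measured from session creation
IMPERSONATION_IDLE_TIMEOUT_SECONDS = 20  # rolling cap, measured from last activity


def impersonated_until(session_created_at: datetime, last_activity_at: datetime) -> datetime:
    idle_deadline = last_activity_at + timedelta(seconds=IMPERSONATION_IDLE_TIMEOUT_SECONDS)
    total_deadline = session_created_at + timedelta(seconds=IMPERSONATION_TIMEOUT_SECONDS)
    return min(idle_deadline, total_deadline)


created = datetime(2024, 1, 1, 12, 0, 0)
# 19s in: the idle deadline (12:00:39) is still earlier than the total one (12:01:40).
assert impersonated_until(created, created + timedelta(seconds=19)) == datetime(2024, 1, 1, 12, 0, 39)
# 95s in: the idle deadline would be 12:01:55, but the total deadline caps it at 12:01:40.
assert impersonated_until(created, created + timedelta(seconds=95)) == datetime(2024, 1, 1, 12, 1, 40)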
Do not edit this, only edit the value JSONEachRow - v2/aggregate_funnel steps + v3/aggregate_funnel steps 600 executable_pool - aggregate_funnel_test_v2 + aggregate_funnel_test_v3 String result @@ -132,13 +132,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v2/aggregate_funnel_test.py + v3/aggregate_funnel_test.py 600 executable_pool - aggregate_funnel_trends_v2 + aggregate_funnel_trends_v3 Array(Tuple(UInt64, Int8, Nullable(String), UUID)) result @@ -174,13 +174,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v2/aggregate_funnel trends + v3/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_v2 + aggregate_funnel_array_trends_v3 Array(Tuple(UInt64, Int8, Array(String), UUID)) result @@ -213,13 +213,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v2/aggregate_funnel trends + v3/aggregate_funnel trends 600 executable_pool - aggregate_funnel_cohort_trends_v2 + aggregate_funnel_cohort_trends_v3 Array(Tuple(UInt64, Int8, UInt64, UUID)) result @@ -252,13 +252,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v2/aggregate_funnel trends + v3/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_test_v2 + aggregate_funnel_array_trends_test_v3 String result @@ -290,12 +290,12 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v2/aggregate_funnel_array_trends_test.py + v3/aggregate_funnel_array_trends_test.py 600 executable_pool - aggregate_funnel_v3 + aggregate_funnel_v4 Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) result @@ -323,13 +323,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v3/aggregate_funnel steps + v4/aggregate_funnel steps 600 executable_pool - aggregate_funnel_cohort_v3 + aggregate_funnel_cohort_v4 Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) result @@ -357,13 +357,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v3/aggregate_funnel steps + v4/aggregate_funnel steps 600 executable_pool - aggregate_funnel_array_v3 + aggregate_funnel_array_v4 Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) result @@ -391,13 +391,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v3/aggregate_funnel steps + v4/aggregate_funnel steps 600 executable_pool - aggregate_funnel_test_v3 + aggregate_funnel_test_v4 String result @@ -425,13 +425,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v3/aggregate_funnel_test.py + v4/aggregate_funnel_test.py 600 executable_pool - aggregate_funnel_trends_v3 + aggregate_funnel_trends_v4 Array(Tuple(UInt64, Int8, Nullable(String), UUID)) result @@ -467,13 +467,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v3/aggregate_funnel trends + v4/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_v3 + aggregate_funnel_array_trends_v4 Array(Tuple(UInt64, Int8, Array(String), UUID)) result @@ -506,13 +506,13 @@ This file is autogenerated by udf_versioner.py. 
Do not edit this, only edit the value JSONEachRow - v3/aggregate_funnel trends + v4/aggregate_funnel trends 600 executable_pool - aggregate_funnel_cohort_trends_v3 + aggregate_funnel_cohort_trends_v4 Array(Tuple(UInt64, Int8, UInt64, UUID)) result @@ -545,13 +545,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v3/aggregate_funnel trends + v4/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_test_v3 + aggregate_funnel_array_trends_test_v4 String result @@ -583,7 +583,7 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v3/aggregate_funnel_array_trends_test.py + v4/aggregate_funnel_array_trends_test.py 600 \ No newline at end of file diff --git a/posthog/user_scripts/v4/aggregate_funnel b/posthog/user_scripts/v4/aggregate_funnel new file mode 100755 index 00000000000000..e62dd751484cb5 --- /dev/null +++ b/posthog/user_scripts/v4/aggregate_funnel @@ -0,0 +1,9 @@ +#!/bin/sh + +DIR_NAME=$(dirname "$0") + +case $( uname -m ) in +aarch64) $DIR_NAME/aggregate_funnel_aarch64 "$@";; +*) $DIR_NAME/aggregate_funnel_x86_64 "$@";; +esac + diff --git a/posthog/user_scripts/v4/aggregate_funnel_aarch64 b/posthog/user_scripts/v4/aggregate_funnel_aarch64 new file mode 100755 index 00000000000000..470c0e61a820b5 Binary files /dev/null and b/posthog/user_scripts/v4/aggregate_funnel_aarch64 differ diff --git a/posthog/user_scripts/v4/aggregate_funnel_array_trends_test.py b/posthog/user_scripts/v4/aggregate_funnel_array_trends_test.py new file mode 100755 index 00000000000000..cf6ab4e33741b0 --- /dev/null +++ b/posthog/user_scripts/v4/aggregate_funnel_array_trends_test.py @@ -0,0 +1,13 @@ +#!/usr/bin/python3 + +import sys +import json + +if __name__ == "__main__": + for line in sys.stdin: + try: + print(json.dumps({"result": line})) # noqa: T201 + # calculate_funnel_trends_from_user_events(*parse_args(line)) + except Exception as e: + print(json.dumps({"result": json.dumps(str(e))}), end="\n") # noqa: T201 + sys.stdout.flush() diff --git a/posthog/user_scripts/v4/aggregate_funnel_test.py b/posthog/user_scripts/v4/aggregate_funnel_test.py new file mode 100755 index 00000000000000..1eae7c9f369142 --- /dev/null +++ b/posthog/user_scripts/v4/aggregate_funnel_test.py @@ -0,0 +1,14 @@ +#!/usr/bin/python3 +import json + +import sys +import traceback + +if __name__ == "__main__": + for line in sys.stdin: + try: + # calculate_funnel_from_user_events(*parse_args(line)) + print(json.dumps({"result": line})) # noqa: T201 + except Exception as e: + print(json.dumps({"result": json.dumps(str(e) + traceback.format_exc())}), end="\n") # noqa: T201 + sys.stdout.flush() diff --git a/posthog/user_scripts/v4/aggregate_funnel_x86_64 b/posthog/user_scripts/v4/aggregate_funnel_x86_64 new file mode 100755 index 00000000000000..fed097f05fe0ad Binary files /dev/null and b/posthog/user_scripts/v4/aggregate_funnel_x86_64 differ diff --git a/posthog/user_scripts/v4/user_defined_function.xml b/posthog/user_scripts/v4/user_defined_function.xml new file mode 100644 index 00000000000000..95af85d63412f1 --- /dev/null +++ b/posthog/user_scripts/v4/user_defined_function.xml @@ -0,0 +1,589 @@ + + + executable_pool + aggregate_funnel_v3 + Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + 
Array(Tuple(Nullable(Float64), UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + v3/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_cohort_v3 + Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, UInt64, Array(Int8))) + value + + JSONEachRow + v3/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_array_v3 + Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Array(String), Array(Int8))) + value + + JSONEachRow + v3/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_test_v3 + String + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + v3/aggregate_funnel_test.py + 600 + + + + executable_pool + aggregate_funnel_trends_v3 + Array(Tuple(UInt64, Int8, Nullable(String), UUID)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + v3/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends_v3 + + Array(Tuple(UInt64, Int8, Array(String), UUID)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) + value + + JSONEachRow + v3/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_cohort_trends_v3 + + Array(Tuple(UInt64, Int8, UInt64, UUID)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UUID, UInt64, Array(Int8))) + value + + JSONEachRow + v3/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends_test_v3 + String + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) + value + + JSONEachRow + v3/aggregate_funnel_array_trends_test.py + 600 + + + executable_pool + aggregate_funnel_v4 + Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Nullable(String), 
Array(Int8))) + value + + JSONEachRow + v4/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_cohort_v4 + Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, UInt64, Array(Int8))) + value + + JSONEachRow + v4/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_array_v4 + Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Array(String), Array(Int8))) + value + + JSONEachRow + v4/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_test_v4 + String + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + v4/aggregate_funnel_test.py + 600 + + + + executable_pool + aggregate_funnel_trends_v4 + Array(Tuple(UInt64, Int8, Nullable(String), UUID)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + v4/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends_v4 + + Array(Tuple(UInt64, Int8, Array(String), UUID)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) + value + + JSONEachRow + v4/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_cohort_trends_v4 + + Array(Tuple(UInt64, Int8, UInt64, UUID)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UUID, UInt64, Array(Int8))) + value + + JSONEachRow + v4/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends_test_v4 + String + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) + value + + JSONEachRow + v4/aggregate_funnel_array_trends_test.py + 600 + + \ No newline at end of file diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py index 750c3385db7278..3cb3fcfbce33cf 100644 --- a/posthog/warehouse/models/external_data_schema.py +++ b/posthog/warehouse/models/external_data_schema.py @@ -84,7 +84,12 @@ def update_incremental_field_last_value(self, last_value: Any) -> None: incremental_field_type == 
IncrementalFieldType.Integer or incremental_field_type == IncrementalFieldType.Numeric ): - last_value_json = last_value_py + if isinstance(last_value_py, int | float): + last_value_json = last_value_py + elif isinstance(last_value_py, datetime): + last_value_json = str(last_value_py) + else: + last_value_json = int(last_value_py) else: last_value_json = str(last_value_py) diff --git a/posthog/warehouse/models/join.py b/posthog/warehouse/models/join.py index b24d6916e93c97..51108c58e578cf 100644 --- a/posthog/warehouse/models/join.py +++ b/posthog/warehouse/models/join.py @@ -113,6 +113,27 @@ def _join_function_for_experiments( if not timestamp_key: raise ResolutionError("experiments_timestamp_key is not set for this join") + whereExpr: list[ast.Expr] = [ + ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=ast.Field(chain=["event"]), + right=ast.Constant(value="$feature_flag_called"), + ) + ] + # :HACK: We need to pull the timestamp gt/lt values from node.where.exprs[0] because + # we can't reference the parent data warehouse table in the where clause. + if node.where and hasattr(node.where, "exprs"): + for expr in node.where.exprs: + if isinstance(expr, ast.CompareOperation): + if expr.op == ast.CompareOperationOp.GtEq or expr.op == ast.CompareOperationOp.LtEq: + # Match within hogql string because it could be 'toDateTime(timestamp)' + if isinstance(expr.left, ast.Alias) and timestamp_key in expr.left.expr.to_hogql(): + whereExpr.append( + ast.CompareOperation( + op=expr.op, left=ast.Field(chain=["timestamp"]), right=expr.right + ) + ) + return ast.JoinExpr( table=ast.SelectQuery( select=[ @@ -128,6 +149,7 @@ def _join_function_for_experiments( }.items() ], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), + where=ast.And(exprs=whereExpr), ), # ASOF JOIN finds the most recent matching event that occurred at or before each data warehouse timestamp. 
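To unpack the ASOF JOIN comment above: for each left-hand row, the join selects the single right-hand row with the greatest timestamp at or before the left row's timestamp. A rough Python rendering of those semantics (illustrative only; the field names are made up, and the real join runs in ClickHouse):

def asof_join(left_rows, right_rows, key, ts):
    """For each left row, pick the matching right row with the greatest
    timestamp that is <= the left row's timestamp; None when nothing matches."""
    joined = []
    for left in left_rows:
        candidates = [
            right
            for right in right_rows
            if right[key] == left[key] and right[ts] <= left[ts]
        ]
        best = max(candidates, key=lambda right: right[ts], default=None)
        joined.append((left, best))
    return joined


left = [{"person_id": "p1", "ts": 50}]
right = [{"person_id": "p1", "ts": 10}, {"person_id": "p1", "ts": 40}, {"person_id": "p1", "ts": 60}]
# Yields the ts=40 event: the most recent one at or before ts=50.
print(asof_join(left, right, key="person_id", ts="ts"))

The where-clause hack above copies the timestamp bounds onto the inner events select because, per the code comment, the parent data warehouse table cannot be referenced there; presumably this also keeps the events scan bounded.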
# @@ -162,7 +184,7 @@ def _join_function_for_experiments( ] ), op=ast.CompareOperationOp.Eq, - right=ast.Field(chain=[join_to_add.to_table, "distinct_id"]), + right=ast.Field(chain=[join_to_add.to_table, *self.joining_table_key.split(".")]), ), ast.CompareOperation( left=ast.Field( diff --git a/requirements-dev.txt b/requirements-dev.txt index 41a109d8cb97f4..023c54a43322e2 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,10 @@ # This file was autogenerated by uv via the following command: # uv pip compile requirements-dev.in -o requirements-dev.txt -aiohttp==3.9.3 +aiohappyeyeballs==2.4.4 + # via + # -c requirements.txt + # aiohttp +aiohttp==3.11.10 # via # -c requirements.txt # aioresponses @@ -378,6 +382,11 @@ pprintpp==0.4.0 # via pytest-icdiff prance==23.6.21.0 # via -r requirements-dev.in +propcache==0.2.1 + # via + # -c requirements.txt + # aiohttp + # yarl protobuf==4.22.1 # via # -c requirements.txt @@ -672,7 +681,7 @@ wrapt==1.15.0 # deprecated xxhash==3.5.0 # via datasets -yarl==1.9.4 +yarl==1.18.3 # via # -c requirements.txt # aiohttp diff --git a/requirements.in b/requirements.in index 264da25775f891..0b8ee20988ee4a 100644 --- a/requirements.in +++ b/requirements.in @@ -4,7 +4,7 @@ # - `uv pip compile requirements.in -o requirements.txt` # - `uv pip compile requirements-dev.in -o requirements-dev.txt` # -aiohttp>=3.9.0 +aiohttp==3.11.10 aioboto3==12.0.0 aiokafka>=0.8 antlr4-python3-runtime==4.13.1 diff --git a/requirements.txt b/requirements.txt index 3c09a5963c90e9..90812c27c71686 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,9 @@ aiobotocore==2.7.0 # via # aioboto3 # s3fs -aiohttp==3.9.3 +aiohappyeyeballs==2.4.4 + # via aiohttp +aiohttp==3.11.10 # via # -r requirements.in # aiobotocore @@ -468,6 +470,10 @@ prometheus-client==0.14.1 # via django-prometheus prompt-toolkit==3.0.39 # via click-repl +propcache==0.2.1 + # via + # aiohttp + # yarl proto-plus==1.22.3 # via google-cloud-bigquery-storage protobuf==4.22.1 @@ -800,7 +806,7 @@ xmlsec==1.3.13 # via # -r requirements.in # python3-saml -yarl==1.9.4 +yarl==1.18.3 # via aiohttp zeep==4.2.1 # via simple-salesforce diff --git a/rust/Cargo.lock b/rust/Cargo.lock index 506c8d333363da..12ea18182d17e0 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -1219,6 +1219,7 @@ dependencies = [ "serde_json", "thiserror", "time", + "tokio", "tracing", "uuid", ] diff --git a/rust/capture/src/token.rs b/rust/capture/src/token.rs index 7924cc9511485e..3e7d99c333a7f4 100644 --- a/rust/capture/src/token.rs +++ b/rust/capture/src/token.rs @@ -12,6 +12,7 @@ pub enum InvalidTokenReason { TooLong, NotAscii, PersonalApiKey, + NullByte, } impl InvalidTokenReason { @@ -22,6 +23,7 @@ impl InvalidTokenReason { // Self::IsNotString => "not_string", Self::TooLong => "too_long", Self::PersonalApiKey => "personal_api_key", + Self::NullByte => "null_byte", } } } @@ -57,6 +59,11 @@ pub fn validate_token(token: &str) -> Result<(), InvalidTokenReason> { return Err(InvalidTokenReason::PersonalApiKey); } + // We refuse tokens with null bytes + if token.contains('\0') { + return Err(InvalidTokenReason::NullByte); + } + Ok(()) } @@ -96,4 +103,12 @@ mod tests { assert!(valid.is_err()); assert_eq!(valid.unwrap_err(), InvalidTokenReason::PersonalApiKey); } + + #[test] + fn blocks_null_byte() { + let valid = validate_token("hello\0there"); + + assert!(valid.is_err()); + assert_eq!(valid.unwrap_err(), InvalidTokenReason::NullByte); + } } diff --git a/rust/common/kafka/Cargo.toml b/rust/common/kafka/Cargo.toml index 
715e7bc04265a6..b2bfddd0088c87 100644 --- a/rust/common/kafka/Cargo.toml +++ b/rust/common/kafka/Cargo.toml @@ -18,3 +18,4 @@ tracing = { workspace = true } uuid = { workspace = true } thiserror = { workspace = true } futures = { workspace = true } +tokio = { workspace = true } diff --git a/rust/common/kafka/src/kafka_consumer.rs b/rust/common/kafka/src/kafka_consumer.rs index 0b55027f682d69..79c3be7f986d5f 100644 --- a/rust/common/kafka/src/kafka_consumer.rs +++ b/rust/common/kafka/src/kafka_consumer.rs @@ -6,6 +6,7 @@ use rdkafka::{ ClientConfig, Message, }; use serde::de::DeserializeOwned; +use std::time::Duration; use crate::config::{ConsumerConfig, KafkaConfig}; @@ -97,6 +98,33 @@ impl SingleTopicConsumer { Ok((payload, offset)) } + + pub async fn json_recv_batch<T>( + &self, + max: usize, + timeout: Duration, + ) -> Vec<Result<(T, Offset), RecvErr>> + where + T: DeserializeOwned, + { + let mut results = Vec::with_capacity(max); + + tokio::select! { + _ = tokio::time::sleep(timeout) => {}, + _ = async { + while results.len() < max { + let result = self.json_recv::<T>().await; + let was_err = result.is_err(); + results.push(result); + if was_err { + break; // Early exit on error, since it might indicate a kafka error or something + } + } + } => {} + } + + results + } } pub struct Offset { diff --git a/rust/cymbal/src/config.rs b/rust/cymbal/src/config.rs index 23968d02d1c4a8..2b0cdaf20a50a8 100644 --- a/rust/cymbal/src/config.rs +++ b/rust/cymbal/src/config.rs @@ -86,6 +86,12 @@ pub struct Config { // Maximum number of lines of pre and post context to get per frame #[envconfig(default = "15")] pub context_line_count: usize, + + #[envconfig(default = "1000")] + pub max_events_per_batch: usize, + + #[envconfig(default = "10")] + pub max_event_batch_wait_seconds: u64, } impl Config { diff --git a/rust/cymbal/src/hack/kafka.rs b/rust/cymbal/src/hack/kafka.rs index c57f27d80b0bd4..cb26faede21656 100644 --- a/rust/cymbal/src/hack/kafka.rs +++ b/rust/cymbal/src/hack/kafka.rs @@ -9,7 +9,10 @@ use rdkafka::{ }; use serde::{de::DeserializeOwned, Serialize}; use serde_json::error::Error as SerdeError; -use std::sync::{Arc, Weak}; +use std::{ + sync::{Arc, Weak}, + time::Duration, +}; use thiserror::Error; use tracing::{debug, error, info}; @@ -145,6 +148,33 @@ impl SingleTopicConsumer { Ok((payload, offset)) } + + pub async fn json_recv_batch<T>( + &self, + max: usize, + timeout: Duration, + ) -> Vec<Result<(T, Offset), RecvErr>> + where + T: DeserializeOwned, + { + let mut results = Vec::with_capacity(max); + + tokio::select!
diff --git a/rust/cymbal/src/config.rs b/rust/cymbal/src/config.rs
index 23968d02d1c4a8..2b0cdaf20a50a8 100644
--- a/rust/cymbal/src/config.rs
+++ b/rust/cymbal/src/config.rs
@@ -86,6 +86,12 @@ pub struct Config {
     // Maximum number of lines of pre and post context to get per frame
     #[envconfig(default = "15")]
     pub context_line_count: usize,
+
+    #[envconfig(default = "1000")]
+    pub max_events_per_batch: usize,
+
+    #[envconfig(default = "10")]
+    pub max_event_batch_wait_seconds: u64,
 }
 
 impl Config {
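Given the `#[envconfig]` attributes above, the new knobs can be tuned per deployment through environment variables; a sketch, assuming envconfig's default field-name-to-variable mapping (MAX_EVENTS_PER_BATCH, MAX_EVENT_BATCH_WAIT_SECONDS) and its `init_from_env` entry point:

    use envconfig::Envconfig;

    // e.g. MAX_EVENTS_PER_BATCH=500 MAX_EVENT_BATCH_WAIT_SECONDS=5 ./cymbal
    fn load_config() -> Config {
        // Falls back to the declared defaults (1000 events, 10 seconds) when unset.
        Config::init_from_env().expect("failed to load config from environment")
    }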
+ panic!("Unhandled error: {:?}", e); + } + }; + + output.push(event); + offsets.push(offset); + } send_keyed_iter_to_kafka( &context.kafka_producer, &context.config.events_topic, |ev| Some(ev.uuid.to_string()), - &[event], + &output, ) .await .expect("Failed to send event to Kafka"); - offset.store().unwrap(); + for offset in offsets { + offset.store().unwrap(); + } metrics::counter!(STACK_PROCESSED).increment(1); whole_loop.label("finished", "true").fin(); diff --git a/rust/cymbal/src/metric_consts.rs b/rust/cymbal/src/metric_consts.rs index 797e47417f5d78..d0ec3cdab31b39 100644 --- a/rust/cymbal/src/metric_consts.rs +++ b/rust/cymbal/src/metric_consts.rs @@ -11,6 +11,7 @@ pub const STORE_CACHE_MISSES: &str = "cymbal_store_cache_misses"; pub const STORE_CACHED_BYTES: &str = "cymbal_store_cached_bytes"; pub const STORE_CACHE_SIZE: &str = "cymbal_store_cache_size"; pub const STORE_CACHE_EVICTIONS: &str = "cymbal_store_cache_evictions"; +pub const STORE_CACHE_EVICTION_RUNS: &str = "cymbal_store_cache_eviction_runs"; pub const MAIN_LOOP_TIME: &str = "cymbal_main_loop_time"; pub const PER_FRAME_TIME: &str = "cymbal_per_frame_time"; pub const PER_STACK_TIME: &str = "cymbal_per_stack_time"; diff --git a/rust/cymbal/src/symbol_store/caching.rs b/rust/cymbal/src/symbol_store/caching.rs index 5cab851fdaca7f..1f1cb22ac9f0bf 100644 --- a/rust/cymbal/src/symbol_store/caching.rs +++ b/rust/cymbal/src/symbol_store/caching.rs @@ -6,7 +6,8 @@ use tokio::sync::Mutex; use crate::{ error::Error, metric_consts::{ - STORE_CACHED_BYTES, STORE_CACHE_EVICTIONS, STORE_CACHE_HITS, STORE_CACHE_MISSES, + STORE_CACHED_BYTES, STORE_CACHE_EVICTIONS, STORE_CACHE_EVICTION_RUNS, STORE_CACHE_HITS, + STORE_CACHE_MISSES, }, }; @@ -117,7 +118,7 @@ impl SymbolSetCache { return; } - metrics::counter!(STORE_CACHE_EVICTIONS).increment(1); + metrics::counter!(STORE_CACHE_EVICTION_RUNS).increment(1); let mut vals: Vec<_> = self.cached.iter().collect(); @@ -137,6 +138,11 @@ impl SymbolSetCache { to_remove.push(to_remove_key.clone()); } + for key in to_remove { + metrics::counter!(STORE_CACHE_EVICTIONS).increment(1); + self.cached.remove(&key); + } + metrics::gauge!(STORE_CACHED_BYTES).set(self.held_bytes as f64); } } diff --git a/rust/feature-flags/src/api/endpoint.rs b/rust/feature-flags/src/api/endpoint.rs index e995cfd5dc15e2..b083ee573c9e28 100644 --- a/rust/feature-flags/src/api/endpoint.rs +++ b/rust/feature-flags/src/api/endpoint.rs @@ -46,7 +46,6 @@ pub async fn flags( let context = RequestContext { state, ip, - meta: meta.0, headers, body, }; diff --git a/rust/feature-flags/src/api/handler.rs b/rust/feature-flags/src/api/handler.rs index 0fdade8d95128f..7a6bef7eed098d 100644 --- a/rust/feature-flags/src/api/handler.rs +++ b/rust/feature-flags/src/api/handler.rs @@ -58,7 +58,6 @@ pub struct FlagsQueryParams { pub struct RequestContext { pub state: State, pub ip: IpAddr, - pub meta: FlagsQueryParams, pub headers: HeaderMap, pub body: Bytes, } @@ -82,11 +81,24 @@ pub struct FeatureFlagEvaluationContext { hash_key_override: Option, } +/// Process a feature flag request and return the evaluated flags +/// +/// ## Flow +/// 1. Decodes and validates the request +/// 2. Extracts and verifies the authentication token +/// 3. Retrieves team information +/// 4. Processes person and group properties +/// 5. Retrieves feature flags +/// 6. 
diff --git a/rust/feature-flags/src/api/handler.rs b/rust/feature-flags/src/api/handler.rs
index 0fdade8d95128f..7a6bef7eed098d 100644
--- a/rust/feature-flags/src/api/handler.rs
+++ b/rust/feature-flags/src/api/handler.rs
@@ -58,7 +58,6 @@ pub struct FlagsQueryParams {
 pub struct RequestContext {
     pub state: State,
     pub ip: IpAddr,
-    pub meta: FlagsQueryParams,
     pub headers: HeaderMap,
     pub body: Bytes,
 }
@@ -82,11 +81,24 @@ pub struct FeatureFlagEvaluationContext {
     hash_key_override: Option<String>,
 }
 
+/// Process a feature flag request and return the evaluated flags
+///
+/// ## Flow
+/// 1. Decodes and validates the request
+/// 2. Extracts and verifies the authentication token
+/// 3. Retrieves team information
+/// 4. Processes person and group properties
+/// 5. Retrieves feature flags
+/// 6. Evaluates flags based on the context
+///
+/// ## Error Handling
+/// - Returns early if any step fails
+/// - Maintains error context through the FlagError enum
+/// - Individual flag evaluation failures don't fail the entire request
 pub async fn process_request(context: RequestContext) -> Result<FlagsResponse, FlagError> {
     let RequestContext {
         state,
         ip,
-        meta: _, // TODO use this
         headers,
         body,
     } = context;
@@ -95,12 +107,12 @@ pub async fn process_request(context: RequestContext) -> Result<FlagsResponse, FlagError> {
+fn process_group_property_overrides(
+    groups: Option<HashMap<String, Value>>,
+    existing_overrides: Option<HashMap<String, HashMap<String, Value>>>,
+) -> Option<HashMap<String, HashMap<String, Value>>> {
+    match groups {
+        Some(groups) => {
+            let group_key_overrides: HashMap<String, HashMap<String, Value>> = groups
+                .into_iter()
+                .map(|(group_type, group_key)| {
+                    let mut properties = existing_overrides
+                        .as_ref()
+                        .and_then(|g| g.get(&group_type))
+                        .cloned()
+                        .unwrap_or_default();
+
+                    properties.insert("$group_key".to_string(), group_key);
+
+                    (group_type, properties)
+                })
+                .collect();
+
+            let mut result = existing_overrides.unwrap_or_default();
+            result.extend(group_key_overrides);
+            Some(result)
+        }
+        None => existing_overrides,
+    }
+}
+
 /// Decode a request into a `FlagRequest`
 /// - Currently only supports JSON requests
 // TODO support all supported content types
@@ -738,4 +787,61 @@ mod tests {
         assert!(!result.error_while_computing_flags);
         assert_eq!(result.feature_flags["test_flag"], FlagValue::Boolean(true));
     }
+
+    #[test]
+    fn test_process_group_property_overrides() {
+        // Test case 1: Both groups and existing overrides
+        let groups = HashMap::from([
+            ("project".to_string(), json!("project_123")),
+            ("organization".to_string(), json!("org_456")),
+        ]);
+
+        let mut existing_overrides = HashMap::new();
+        let mut project_props = HashMap::new();
+        project_props.insert("industry".to_string(), json!("tech"));
+        existing_overrides.insert("project".to_string(), project_props);
+
+        let result =
+            process_group_property_overrides(Some(groups.clone()), Some(existing_overrides));
+
+        assert!(result.is_some());
+        let result = result.unwrap();
+
+        // Check project properties
+        let project_props = result.get("project").expect("Project properties missing");
+        assert_eq!(project_props.get("industry"), Some(&json!("tech")));
+        assert_eq!(project_props.get("$group_key"), Some(&json!("project_123")));
+
+        // Check organization properties
+        let org_props = result
+            .get("organization")
+            .expect("Organization properties missing");
+        assert_eq!(org_props.get("$group_key"), Some(&json!("org_456")));
+
+        // Test case 2: Only groups, no existing overrides
+        let result = process_group_property_overrides(Some(groups.clone()), None);
+
+        assert!(result.is_some());
+        let result = result.unwrap();
+        assert_eq!(result.len(), 2);
+        assert_eq!(
+            result.get("project").unwrap().get("$group_key"),
+            Some(&json!("project_123"))
+        );
+
+        // Test case 3: No groups, only existing overrides
+        let mut existing_overrides = HashMap::new();
+        let mut project_props = HashMap::new();
+        project_props.insert("industry".to_string(), json!("tech"));
+        existing_overrides.insert("project".to_string(), project_props);
+
+        let result = process_group_property_overrides(None, Some(existing_overrides.clone()));
+
+        assert!(result.is_some());
+        assert_eq!(result.unwrap(), existing_overrides);
+
+        // Test case 4: Neither groups nor existing overrides
+        let result = process_group_property_overrides(None, None);
+        assert!(result.is_none());
+    }
 }
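The net effect of `process_group_property_overrides` is that each group named in the request contributes a `$group_key` property override on top of whatever group property overrides the caller already sent. A compact sketch mirroring test case 1 above (illustrative only, and it assumes the function is visible from the calling scope):

    use serde_json::json;
    use std::collections::HashMap;

    let groups = Some(HashMap::from([("project".to_string(), json!("project_123"))]));
    let existing = Some(HashMap::from([(
        "project".to_string(),
        HashMap::from([("industry".to_string(), json!("tech"))]),
    )]));

    // merged["project"] == {"industry": "tech", "$group_key": "project_123"}
    let merged = process_group_property_overrides(groups, existing).unwrap();
    assert_eq!(merged["project"]["$group_key"], json!("project_123"));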
diff --git a/rust/feature-flags/src/api/types.rs b/rust/feature-flags/src/api/types.rs
index 3eb81b7d1adad0..0f04f2a5b40a51 100644
--- a/rust/feature-flags/src/api/types.rs
+++ b/rust/feature-flags/src/api/types.rs
@@ -1,4 +1,5 @@
 use serde::{Deserialize, Serialize};
+use serde_json::Value;
 use std::collections::HashMap;
 
 #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
@@ -18,4 +19,5 @@ pub enum FlagValue {
 pub struct FlagsResponse {
     pub error_while_computing_flags: bool,
     pub feature_flags: HashMap<String, FlagValue>,
+    pub feature_flag_payloads: HashMap<String, Value>, // flag key -> payload
 }
diff --git a/rust/feature-flags/src/cohort/cohort_operations.rs b/rust/feature-flags/src/cohort/cohort_operations.rs
index 60afc7ca30f1c7..b992e8f66473f3 100644
--- a/rust/feature-flags/src/cohort/cohort_operations.rs
+++ b/rust/feature-flags/src/cohort/cohort_operations.rs
@@ -302,8 +302,6 @@ mod tests {
             .find(|c| c.id == main_cohort.id)
             .expect("Failed to find main cohort");
 
-        println!("fetched_main_cohort: {:?}", fetched_main_cohort);
-
         let dependencies = fetched_main_cohort.extract_dependencies().unwrap();
         let expected_dependencies: HashSet<CohortId> =
             [dependent_cohort.id].iter().cloned().collect();
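With the new field in place, a response for the payload test further below would serialize roughly as follows, assuming the struct's camelCase serde renaming (not shown in this hunk, but asserted on by the integration tests):

    use serde_json::json;

    let expected = json!({
        "errorWhileComputingFlags": false,
        "featureFlags": { "filter-by-property": true },
        "featureFlagPayloads": {
            "filter-by-property": { "color": "blue" } // flag key -> payload
        }
    });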
diff --git a/rust/feature-flags/src/flags/flag_matching.rs b/rust/feature-flags/src/flags/flag_matching.rs
index 04b9dc67c79399..09287fde4fef0b 100644
--- a/rust/feature-flags/src/flags/flag_matching.rs
+++ b/rust/feature-flags/src/flags/flag_matching.rs
@@ -308,9 +308,25 @@ impl FeatureFlagMatcher {
             error_while_computing_flags: initial_error
                 || flags_response.error_while_computing_flags,
             feature_flags: flags_response.feature_flags,
+            feature_flag_payloads: flags_response.feature_flag_payloads,
         }
     }
 
+    /// Processes hash key overrides for feature flags with experience continuity enabled.
+    ///
+    /// This method handles the logic for managing hash key overrides, which are used to ensure
+    /// consistent feature flag experiences across different distinct IDs (e.g., when a user logs in).
+    /// It performs the following steps:
+    ///
+    /// 1. Checks if a hash key override needs to be written by comparing the current distinct ID
+    ///    with the provided hash key
+    /// 2. If needed, writes the hash key override to the database using the writer connection
+    /// 3. Increments metrics to track successful/failed hash key override writes
+    /// 4. Retrieves and returns the current hash key overrides for the target distinct IDs
+    ///
+    /// Returns a tuple containing:
+    /// - Option<HashMap<String, String>>: The hash key overrides if successfully retrieved, None if there was an error
+    /// - bool: Whether there was an error during processing (true = error occurred)
     async fn process_hash_key_override(
         &self,
         hash_key: String,
@@ -397,15 +413,22 @@ impl FeatureFlagMatcher {
         }
     }
 
-    async fn evaluate_flags_with_overrides(
+    /// Evaluates feature flags with property and hash key overrides.
+    ///
+    /// This function evaluates feature flags in two steps:
+    /// 1. First, it evaluates flags that can be computed using only the provided property overrides
+    /// 2. Then, for remaining flags that need database properties, it fetches and caches those properties
+    ///    before evaluating those flags
+    pub async fn evaluate_flags_with_overrides(
         &mut self,
         feature_flags: FeatureFlagList,
         person_property_overrides: Option<HashMap<String, Value>>,
         group_property_overrides: Option<HashMap<String, HashMap<String, Value>>>,
         hash_key_overrides: Option<HashMap<String, String>>,
     ) -> FlagsResponse {
-        let mut result = HashMap::new();
         let mut error_while_computing_flags = false;
+        let mut feature_flags_map = HashMap::new();
+        let mut feature_flag_payloads_map = HashMap::new();
         let mut flags_needing_db_properties = Vec::new();
 
         // Step 1: Evaluate flags with locally computable property overrides first
@@ -425,7 +448,11 @@
             {
                 Ok(Some(flag_match)) => {
                     let flag_value = self.flag_match_to_value(&flag_match);
-                    result.insert(flag.key.clone(), flag_value);
+                    feature_flags_map.insert(flag.key.clone(), flag_value);
+
+                    if let Some(payload) = flag_match.payload {
+                        feature_flag_payloads_map.insert(flag.key.clone(), payload);
+                    }
                 }
                 Ok(None) => {
                     flags_needing_db_properties.push(flag.clone());
@@ -448,21 +475,51 @@
 
         // Step 2: Fetch and cache properties for remaining flags (just one DB lookup for all of relevant properties)
         if !flags_needing_db_properties.is_empty() {
-            let group_type_indexes: HashSet<GroupTypeIndex> = flags_needing_db_properties
+            let group_type_indexes_required: HashSet<GroupTypeIndex> = flags_needing_db_properties
                 .iter()
                 .filter_map(|flag| flag.get_group_type_index())
                 .collect();
 
+            // Map group names to group_type_index and group_keys
+            let group_type_to_key_map: HashMap<GroupTypeIndex, String> = self
+                .groups
+                .iter()
+                .filter_map(|(group_type, group_key_value)| {
+                    let group_key = group_key_value.as_str()?.to_string();
+                    self.group_type_mapping_cache
+                        .group_types_to_indexes
+                        .get(group_type)
+                        .cloned()
+                        .map(|group_type_index| (group_type_index, group_key))
+                })
+                .collect();
+
+            // Extract group_keys that are relevant to the required group_type_indexes
+            let group_keys: HashSet<String> = group_type_to_key_map
+                .iter()
+                .filter_map(|(group_type_index, group_key)| {
+                    if group_type_indexes_required.contains(group_type_index) {
+                        Some(group_key.clone())
+                    } else {
+                        None
+                    }
+                })
+                .collect();
+
+            // Extract group_type_indexes for the required flags
+            let group_type_indexes: HashSet<GroupTypeIndex> = group_type_indexes_required.clone();
+
             let reader = self.reader.clone();
             let distinct_id = self.distinct_id.clone();
             let team_id = self.team_id;
 
-            match fetch_and_locally_cache_all_properties(
+            match fetch_and_locally_cache_all_relevant_properties(
                 &mut self.properties_cache,
                 reader,
                 distinct_id,
                 team_id,
                 &group_type_indexes,
+                &group_keys,
             )
             .await
             {
@@ -487,9 +544,6 @@
         }
 
         // Step 3: Evaluate remaining flags with cached properties
-        // At this point we've already done a round of flag evaluations with locally computable property overrides
-        // This step is for flags that couldn't be evaluated locally due to missing property values,
-        // so we do a single query to fetch all of the remaining properties, and then proceed with flag evaluations
         for flag in flags_needing_db_properties {
             match self
                 .get_match(&flag, None, hash_key_overrides.clone())
@@ -497,7 +551,11 @@
             {
                 Ok(flag_match) => {
                     let flag_value = self.flag_match_to_value(&flag_match);
-                    result.insert(flag.key.clone(), flag_value);
+                    feature_flags_map.insert(flag.key.clone(), flag_value);
+
+                    if let Some(payload) = flag_match.payload {
+                        feature_flag_payloads_map.insert(flag.key.clone(), payload);
+                    }
                 }
                 Err(e) => {
                     error_while_computing_flags = true;
@@ -519,7 +577,8 @@ impl FeatureFlagMatcher {
 
         FlagsResponse {
             error_while_computing_flags,
-            feature_flags: result,
+            feature_flags: feature_flags_map,
+            feature_flag_payloads: feature_flag_payloads_map,
         }
     }
 
@@ -1061,8 +1124,49 @@
         let reader = self.reader.clone();
         let team_id = self.team_id;
 
+        // `groups` looks like {"project": "project_123"} and the group type index map looks like
+        // {"project": 1}; we need the group keys (e.g. ["project_123"]), but keyed by group type.
+        // Retrieve group_type_name using group_type_index from the cache
+        let group_type_mapping = self
+            .group_type_mapping_cache
+            .group_type_index_to_group_type_map()
+            .await?;
+        let group_type_name = match group_type_mapping.get(&group_type_index) {
+            Some(name) => name.clone(),
+            None => {
+                error!(
+                    "No group_type_name found for group_type_index {}",
+                    group_type_index
+                );
+                return Err(FlagError::NoGroupTypeMappings);
+            }
+        };
+
+        // Retrieve the corresponding group_key from self.groups using group_type_name
+        let group_key = match self.groups.get(&group_type_name) {
+            Some(Value::String(key)) => key.clone(),
+            Some(_) => {
+                error!(
+                    "Group key for group_type_name '{}' is not a string",
+                    group_type_name
+                );
+                return Err(FlagError::NoGroupTypeMappings);
+            }
+            None => {
+                // If there's no group_key provided for this group_type_name, we consider that there are no properties to fetch
+                return Ok(HashMap::new());
+            }
+        };
+
         let db_properties =
-            fetch_group_properties_from_db(reader, team_id, group_type_index).await?;
+            fetch_group_properties_from_db(reader, team_id, group_type_index, group_key).await?;
 
         inc(
             DB_GROUP_PROPERTIES_READS_COUNTER,
@@ -1140,10 +1244,13 @@
             .await?
             .get(&group_type_index)
             .and_then(|group_type_name| self.groups.get(group_type_name))
-            .cloned()
-            .unwrap_or_default();
+            .and_then(|v| v.as_str())
+            // NB: we currently use the empty string ("") as the hashed identifier for group flags
+            // without a group key, to keep parity with the old service: changing it would change
+            // hash values.
+            .unwrap_or("")
+            .to_string();
 
-        Ok(group_key.to_string())
+        Ok(group_key)
     } else {
         // Person-based flag
         // Use hash key overrides for experience continuity
@@ -1172,9 +1279,9 @@
             .hashed_identifier(feature_flag, hash_key_overrides)
             .await?;
         if hashed_identifier.is_empty() {
-            // Return a hash value that will make the flag evaluate to false
-            // TODO make this cleaner – we should have a way to return a default value
-            return Ok(0.0);
+            // Return a hash value that will make the flag evaluate to false, since we
+            // can't evaluate a flag without an identifier.
+            return Ok(0.0); // NB: a flag with a 0.0 hash will always evaluate to false
         }
         let hash_key = format!("{}.{}{}", feature_flag.key, hashed_identifier, salt);
         let mut hasher = Sha1::new();
@@ -1248,7 +1355,7 @@
 
 /// Evaluate static cohort filters by checking if the person is in each cohort.
 async fn evaluate_static_cohorts(
     reader: PostgresReader,
-    person_id: i32, // Change this parameter from distinct_id to person_id
+    person_id: i32,
     cohort_ids: Vec<CohortId>,
 ) -> Result<Vec<CohortId>, FlagError> {
     let mut conn = reader.get_connection().await?;
@@ -1268,7 +1375,7 @@
 
     let rows = sqlx::query(query)
         .bind(&cohort_ids)
-        .bind(person_id) // Bind person_id directly
+        .bind(person_id)
         .fetch_all(&mut *conn)
         .await?;
 
@@ -1448,7 +1555,6 @@ fn build_cohort_dependency_graph(
         }
     }
 
-    // Check for cycles, this is an directed acyclic graph so we use is_cyclic_directed
     if is_cyclic_directed(&graph) {
         return Err(FlagError::CohortDependencyCycle(format!(
             "Cyclic dependency detected starting at cohort {}",
@@ -1463,52 +1569,62 @@
 ///
 /// This function fetches both person and group properties for a specified distinct ID and team ID.
 /// It updates the properties cache with the fetched properties and returns the result.
-async fn fetch_and_locally_cache_all_properties(
+async fn fetch_and_locally_cache_all_relevant_properties(
     properties_cache: &mut PropertiesCache,
     reader: PostgresReader,
     distinct_id: String,
     team_id: TeamId,
     group_type_indexes: &HashSet<GroupTypeIndex>,
+    group_keys: &HashSet<String>,
 ) -> Result<(), FlagError> {
     let mut conn = reader.as_ref().get_connection().await?;
 
     let query = r#"
-        SELECT
-            person.person_id,
-            person.person_properties,
-            group_properties.group_properties
-        FROM (
-            SELECT
-                "posthog_person"."id" AS person_id,
-                "posthog_person"."properties" AS person_properties
-            FROM "posthog_person"
-            INNER JOIN "posthog_persondistinctid"
-                ON "posthog_person"."id" = "posthog_persondistinctid"."person_id"
-            WHERE
-                "posthog_persondistinctid"."distinct_id" = $1
-                AND "posthog_persondistinctid"."team_id" = $2
-                AND "posthog_person"."team_id" = $2
-            LIMIT 1
-        ) AS person,
-        (
-            SELECT
-                json_object_agg(
-                    "posthog_group"."group_type_index",
-                    "posthog_group"."group_properties"
-                ) AS group_properties
-            FROM "posthog_group"
-            WHERE
-                "posthog_group"."team_id" = $2
-                AND "posthog_group"."group_type_index" = ANY($3)
-        ) AS group_properties
+        SELECT
+            (
+                SELECT "posthog_person"."id"
+                FROM "posthog_person"
+                INNER JOIN "posthog_persondistinctid"
+                    ON "posthog_person"."id" = "posthog_persondistinctid"."person_id"
+                WHERE
+                    "posthog_persondistinctid"."distinct_id" = $1
+                    AND "posthog_persondistinctid"."team_id" = $2
+                    AND "posthog_person"."team_id" = $2
+                LIMIT 1
+            ) AS person_id,
+            (
+                SELECT "posthog_person"."properties"
+                FROM "posthog_person"
+                INNER JOIN "posthog_persondistinctid"
+                    ON "posthog_person"."id" = "posthog_persondistinctid"."person_id"
+                WHERE
+                    "posthog_persondistinctid"."distinct_id" = $1
+                    AND "posthog_persondistinctid"."team_id" = $2
+                    AND "posthog_person"."team_id" = $2
+                LIMIT 1
+            ) AS person_properties,
+            (
+                SELECT
+                    json_object_agg(
+                        "posthog_group"."group_type_index",
+                        "posthog_group"."group_properties"
+                    )
+                FROM "posthog_group"
+                WHERE
+                    "posthog_group"."team_id" = $2
+                    AND "posthog_group"."group_type_index" = ANY($3)
+                    AND "posthog_group"."group_key" = ANY($4)
+            ) AS group_properties
     "#;
 
     let group_type_indexes_vec: Vec<GroupTypeIndex> = group_type_indexes.iter().cloned().collect();
+    let group_keys_vec: Vec<String> = group_keys.iter().cloned().collect();
 
     let row: (Option<i32>, Option<Value>, Option<Value>) = sqlx::query_as(query)
         .bind(&distinct_id)
         .bind(team_id)
         .bind(&group_type_indexes_vec)
+        .bind(&group_keys_vec) // Bind group_keys_vec to $4
         .fetch_optional(&mut *conn)
         .await?
         .unwrap_or((None, None, None));
 
@@ -1602,6 +1718,7 @@ async fn fetch_group_properties_from_db(
     reader: PostgresReader,
     team_id: TeamId,
     group_type_index: GroupTypeIndex,
+    group_key: String,
 ) -> Result<HashMap<String, Value>, FlagError> {
     let mut conn = reader.as_ref().get_connection().await?;
 
@@ -1609,13 +1726,15 @@
         SELECT "posthog_group"."group_properties"
         FROM "posthog_group"
         WHERE ("posthog_group"."team_id" = $1
-            AND "posthog_group"."group_type_index" = $2)
+            AND "posthog_group"."group_type_index" = $2
+            AND "posthog_group"."group_key" = $3)
         LIMIT 1
     "#;
 
     let row: Option<Value> = sqlx::query_scalar(query)
         .bind(team_id)
         .bind(group_type_index)
+        .bind(group_key)
         .fetch_optional(&mut *conn)
         .await?;
diff --git a/rust/feature-flags/src/utils/test_utils.rs b/rust/feature-flags/src/utils/test_utils.rs
index 1c6cf2b1caafe6..99501fcaa8b78e 100644
--- a/rust/feature-flags/src/utils/test_utils.rs
+++ b/rust/feature-flags/src/utils/test_utils.rs
@@ -261,6 +261,29 @@ pub async fn insert_new_team_in_pg(
     Ok(team)
 }
 
+pub async fn insert_group_type_mapping_in_pg(
+    client: Arc<dyn Client + Send + Sync>,
+    team_id: i32,
+    group_type: &str,
+    group_type_index: i32,
+) -> Result<(), Error> {
+    let mut conn = client.get_connection().await?;
+    let res = sqlx::query(
+        r#"INSERT INTO posthog_grouptypemapping
+        (team_id, project_id, group_type, group_type_index, name_singular, name_plural)
+        VALUES
+        ($1, $1, $2, $3, NULL, NULL)
+        ON CONFLICT (team_id, group_type) DO NOTHING"#,
+    )
+    .bind(team_id)
+    .bind(group_type)
+    .bind(group_type_index)
+    .execute(&mut *conn)
+    .await?;
+    assert_eq!(res.rows_affected(), 1);
+    Ok(())
+}
+
 pub async fn insert_flag_for_team_in_pg(
     client: Arc<dyn Client + Send + Sync>,
     team_id: i32,
@@ -454,3 +477,55 @@
 
     Ok(())
 }
+
+#[derive(Debug)]
+pub struct Group {
+    pub id: i32,
+    pub team_id: i32,
+    pub group_type_index: i32,
+    pub group_key: String,
+    pub group_properties: Value,
+}
+
+pub async fn create_group_in_pg(
+    client: Arc<dyn Client + Send + Sync>,
+    team_id: i32,
+    group_type: &str,
+    group_key: &str,
+    group_properties: Value,
+) -> Result<Group, Error> {
+    // First, retrieve the group_type_index from grouptypemapping
+    let mut conn = client.get_connection().await?;
+    let row = sqlx::query(
+        r#"SELECT group_type_index FROM posthog_grouptypemapping
+        WHERE team_id = $1 AND group_type = $2"#,
+    )
+    .bind(team_id)
+    .bind(group_type)
+    .fetch_one(&mut *conn)
+    .await?;
+    let group_type_index: i32 = row.get("group_type_index");
+
+    // Insert the group with all non-nullable fields
+    let res = sqlx::query(
+        r#"INSERT INTO posthog_group
+        (team_id, group_type_index, group_key, group_properties, created_at, properties_last_updated_at, properties_last_operation, version)
+        VALUES ($1, $2, $3, $4, '2024-06-17', '{}'::jsonb, '{}'::jsonb, 0)
+        RETURNING id"#,
+    )
+    .bind(team_id)
+    .bind(group_type_index)
+    .bind(group_key)
+    .bind(group_properties.clone())
+    .fetch_one(&mut *conn)
+    .await?;
+    let group_id: i32 = res.get("id");
+
+    Ok(Group {
+        id: group_id,
+        team_id,
+        group_type_index,
+        group_key: group_key.to_string(),
+        group_properties,
+    })
+}
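A sketch of how the two new helpers compose in a test (the `seed_org_group` wrapper is illustrative only; `create_group_in_pg` assumes a matching `posthog_grouptypemapping` row already exists, which `insert_group_type_mapping_in_pg` is one way to provide):

    async fn seed_org_group(
        client: Arc<dyn Client + Send + Sync>,
        team_id: i32,
    ) -> Result<Group, anyhow::Error> {
        // Register the group type so create_group_in_pg can resolve its index...
        insert_group_type_mapping_in_pg(client.clone(), team_id, "organization", 1).await?;
        // ...then create the group row whose properties flags will match on.
        let group = create_group_in_pg(
            client,
            team_id,
            "organization",
            "foo",
            serde_json::json!({"email": "posthog@example.com"}),
        )
        .await?;
        assert_eq!(group.group_type_index, 1);
        Ok(group)
    }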
diff --git a/rust/feature-flags/tests/test_flags.rs b/rust/feature-flags/tests/test_flags.rs
index 918a73ede6faaa..9ee793596c0b1b 100644
--- a/rust/feature-flags/tests/test_flags.rs
+++ b/rust/feature-flags/tests/test_flags.rs
@@ -1,6 +1,7 @@
 use anyhow::Result;
 use assert_json_diff::assert_json_include;
 
+use rand::Rng;
 use reqwest::StatusCode;
 use serde_json::{json, Value};
 
@@ -8,8 +9,9 @@
 use crate::common::*;
 
 use feature_flags::config::DEFAULT_TEST_CONFIG;
 use feature_flags::utils::test_utils::{
-    insert_flags_for_team_in_redis, insert_new_team_in_pg, insert_new_team_in_redis,
-    setup_pg_reader_client, setup_redis_client,
+    create_group_in_pg, insert_flags_for_team_in_redis, insert_new_team_in_pg,
+    insert_new_team_in_redis, insert_person_for_team_in_pg, setup_pg_reader_client,
+    setup_redis_client,
 };
 
 pub mod common;
@@ -91,7 +93,7 @@ async fn it_rejects_invalid_headers_flag_request() -> Result<()> {
     .await;
 
     assert_eq!(StatusCode::BAD_REQUEST, res.status());
-    // We don't want to deserialize the data into a flagResponse struct here,
+    // We don't want to deserialize the data into a FlagsResponse struct here,
     // because we want to assert the shape of the raw json data.
     let response_text = res.text().await?;
 
@@ -193,7 +195,6 @@ async fn it_handles_malformed_json() -> Result<()> {
     assert_eq!(StatusCode::BAD_REQUEST, res.status());
 
     let response_text = res.text().await?;
-    println!("Response text: {:?}", response_text);
 
     assert!(
         response_text.contains("Failed to decode request: invalid JSON"),
@@ -400,7 +401,7 @@
 }
 
 #[tokio::test]
-async fn it_handles_flag_with_group_properties() -> Result<()> {
+async fn it_matches_flags_to_a_request_with_group_property_overrides() -> Result<()> {
     let config = DEFAULT_TEST_CONFIG.clone();
     let distinct_id = "user_distinct_id".to_string();
 
@@ -500,3 +501,244 @@
 
     Ok(())
 }
+
+#[tokio::test]
+async fn test_feature_flags_with_json_payloads() -> Result<()> {
+    let config = DEFAULT_TEST_CONFIG.clone();
+    let distinct_id = "example_id".to_string();
+    let redis_client = setup_redis_client(Some(config.redis_url.clone()));
+    let pg_client = setup_pg_reader_client(None).await;
+
+    // Insert a new team into Redis and retrieve the team details
+    let team = insert_new_team_in_redis(redis_client.clone())
+        .await
+        .unwrap();
+    let token = team.api_token;
+
+    insert_new_team_in_pg(pg_client.clone(), Some(team.id))
+        .await
+        .unwrap();
+
+    insert_person_for_team_in_pg(
+        pg_client.clone(),
+        team.id,
+        distinct_id.clone(),
+        Some(json!({"email": "tim@posthog.com"})),
+    )
+    .await?;
+
+    let flag_json = json!([{
+        "id": 1,
+        "key": "filter-by-property",
+        "name": "Filter by property",
+        "active": true,
+        "deleted": false,
+        "team_id": team.id,
+        "filters": {
+            "groups": [
+                {
+                    "properties": [
+                        {
+                            "key": "email",
+                            "value": "tim@posthog.com",
+                            "operator": "exact",
+                            "type": "person",
+                        }
+                    ],
+                    "rollout_percentage": null,
+                }
+            ],
+            "payloads": {
+                "true": {
+                    "color": "blue"
+                }
+            },
+        },
+    }]);
+
+    insert_flags_for_team_in_redis(redis_client, team.id, Some(flag_json.to_string())).await?;
+
+    let server = ServerHandle::for_config(config).await;
+
+    let payload = json!({
+        "token": token,
+        "distinct_id": distinct_id,
+    });
+
+    let res = server.send_flags_request(payload.to_string()).await;
+
+    assert_eq!(StatusCode::OK, res.status());
+
+    let json_data = res.json::<Value>().await?;
+
+    assert_json_include!(
+        actual: json_data,
+        expected: json!({
+            "featureFlagPayloads": {
+                "filter-by-property": { "color": "blue" }
+            }
+        })
+    );
+
+    Ok(())
+}
+
+#[tokio::test]
+async fn test_feature_flags_with_group_relationships() -> Result<()> {
+    let config = DEFAULT_TEST_CONFIG.clone();
+    let distinct_id = "example_id".to_string();
+    let redis_client = setup_redis_client(Some(config.redis_url.clone()));
+    let pg_client = setup_pg_reader_client(None).await;
+    let team_id = rand::thread_rng().gen_range(1..10_000_000);
+    let team = insert_new_team_in_pg(pg_client.clone(), Some(team_id))
+        .await
+        .unwrap();
+
+    let token = team.api_token;
+
+    // Create a group of type "organization" (group_type_index 1) with group_key "foo" and specific properties
+    create_group_in_pg(
+        pg_client.clone(),
+        team.id,
+        "organization",
+        "foo",
+        json!({"email": "posthog@example.com"}),
+    )
+    .await?;
+
+    // Create a group of type "project" (group_type_index 0) with group_key "bar" and specific properties
+    create_group_in_pg(
+        pg_client.clone(),
+        team.id,
+        "project",
+        "bar",
+        json!({"name": "Project Bar"}),
+    )
+    .await?;
+
+    // Define feature flags
+    let flags_json = json!([
+        {
+            "id": 1,
+            "key": "default-no-prop-group-flag",
+            "name": "This is a feature flag with default params, no filters.",
+            "active": true,
+            "deleted": false,
+            "team_id": team.id,
+            "filters": {
+                "aggregation_group_type_index": 0,
+                "groups": [{"rollout_percentage": null}]
+            }
+        },
+        {
+            "id": 2,
+            "key": "groups-flag",
+            "name": "This is a group-based flag",
+            "active": true,
+            "deleted": false,
+            "team_id": team.id,
+            "filters": {
+                "aggregation_group_type_index": 1,
+                "groups": [
+                    {
+                        "properties": [
+                            {
+                                "key": "email",
+                                "value": "posthog",
+                                "operator": "icontains",
+                                "type": "group",
+                                "group_type_index": 1
+                            }
+                        ],
+                        "rollout_percentage": null
+                    }
+                ]
+            }
+        }
+    ]);
+
+    // Insert the feature flags into Redis
+    insert_flags_for_team_in_redis(redis_client.clone(), team.id, Some(flags_json.to_string()))
+        .await?;
+
+    let server = ServerHandle::for_config(config).await;
+
+    // First decision: without specifying any groups
+    {
+        let payload = json!({
+            "token": token,
+            "distinct_id": distinct_id
+        });
+
+        let res = server.send_flags_request(payload.to_string()).await;
+        assert_eq!(res.status(), StatusCode::OK);
+
+        let json_data = res.json::<Value>().await?;
+        assert_json_include!(
+            actual: json_data,
+            expected: json!({
+                "errorWhileComputingFlags": false,
+                "featureFlags": {
+                    "default-no-prop-group-flag": false, // if we don't specify any groups in the request, the flags should be false
+                    "groups-flag": false
+                }
+            })
+        );
+    }
+
+    // Second decision: with non-matching group overrides
+    {
+        let payload = json!({
+            "token": token,
+            "distinct_id": distinct_id,
+            "groups": {
+                "organization": "foo2", // Does not match existing group_key "foo"
+                "project": "bar" // Matches existing project group
+            }
+        });
+
+        let res = server.send_flags_request(payload.to_string()).await;
+        assert_eq!(res.status(), StatusCode::OK);
+
+        let json_data = res.json::<Value>().await?;
+        assert_json_include!(
+            actual: json_data,
+            expected: json!({
+                "errorWhileComputingFlags": false,
+                "featureFlags": {
+                    "default-no-prop-group-flag": true,
+                    "groups-flag": false
+                }
+            })
+        );
+    }
+
+    // Third decision: with matching groups
+    {
+        let payload = json!({
+            "token": token,
+            "distinct_id": distinct_id,
+            "groups": {
+                "organization": "foo", // Matches existing group_key for organization "foo"
+                "project": "bar" // Matches existing group_key for project "bar"
+            }
+        });
+
+        let res = server.send_flags_request(payload.to_string()).await;
+        assert_eq!(res.status(), StatusCode::OK);
+
+        let json_data = res.json::<Value>().await?;
+        assert_json_include!(
+            actual: json_data,
+            expected: json!({
+                "errorWhileComputingFlags": false,
+                "featureFlags": {
+                    "default-no-prop-group-flag": true,
+                    "groups-flag": true
+                }
+            })
+        );
+    }
+
+    Ok(())
+}