diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml index 48a0454bd0f33..410d4deb18461 100644 --- a/.github/workflows/ci-backend.yml +++ b/.github/workflows/ci-backend.yml @@ -91,7 +91,6 @@ jobs: backend-code-quality: needs: changes - # if: needs.changes.outputs.backend == 'true' # <- Can't add this here because it's a required check timeout-minutes: 30 name: Python code quality checks @@ -106,12 +105,10 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - uses: actions/checkout@v3 - if: needs.changes.outputs.backend == 'true' with: fetch-depth: 1 - name: Set up Python - if: needs.changes.outputs.backend == 'true' uses: actions/setup-python@v5 with: python-version: 3.11.9 @@ -121,40 +118,32 @@ jobs: # uv is a fast pip alternative: https://github.com/astral-sh/uv/ - run: pip install uv - if: needs.changes.outputs.backend == 'true' - name: Install SAML (python3-saml) dependencies - if: needs.changes.outputs.backend == 'true' run: | sudo apt-get update sudo apt-get install libxml2-dev libxmlsec1 libxmlsec1-dev libxmlsec1-openssl - name: Install Python dependencies - if: needs.changes.outputs.backend == 'true' run: | uv pip install --system -r requirements.txt -r requirements-dev.txt - name: Check for syntax errors, import sort, and code style violations - if: needs.changes.outputs.backend == 'true' run: | ruff check . - name: Check formatting - if: needs.changes.outputs.backend == 'true' run: | ruff format --check --diff . - name: Add Problem Matcher - if: needs.changes.outputs.backend == 'true' run: echo "::add-matcher::.github/mypy-problem-matcher.json" - name: Check static typing - if: needs.changes.outputs.backend == 'true' run: | mypy -p posthog | mypy-baseline filter - name: Check if "schema.py" is up to date - if: needs.changes.outputs.backend == 'true' run: | npm run schema:build:python && git diff --exit-code @@ -235,9 +224,8 @@ jobs: django: needs: changes - # if: needs.changes.outputs.backend == 'true' # <- Can't add this here because it's a required check - - timeout-minutes: 30 # increase for tmate testing + # increase for tmate testing + timeout-minutes: 30 name: Django tests – ${{ matrix.segment }} (persons-on-events ${{ matrix.person-on-events && 'on' || 'off' }}), Py ${{ matrix.python-version }}, ${{ matrix.clickhouse-server-image }} (${{matrix.group}}/${{ matrix.concurrency }}) runs-on: ubuntu-24.04 @@ -273,8 +261,10 @@ jobs: group: 3 steps: + # The first step is the only one that should run if `needs.changes.outputs.backend == 'false'`. + # All the other ones should rely on `needs.changes.outputs.backend` directly or indirectly, so that they're + # effectively skipped if backend code is unchanged. See https://github.com/PostHog/posthog/pull/15174. 
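+ # For illustration, the resulting shape is roughly (step names here are hypothetical, not part of this change): + # - uses: actions/checkout@v3 # no `if`, so the job always starts and the required check reports a status + # - name: Some backend-only step + # if: needs.changes.outputs.backend == 'true' # real work happens only when backend code changed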
- uses: actions/checkout@v3 - if: needs.changes.outputs.backend == 'true' with: fetch-depth: 1 repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -317,7 +307,7 @@ jobs: fi - name: Fail CI if some snapshots have been updated but not committed - if: needs.changes.outputs.backend == 'true' && steps.changed-files.outputs.files_found == 'true' && steps.add-and-commit.outcome == 'success' + if: steps.changed-files.outputs.files_found == 'true' && steps.add-and-commit.outcome == 'success' run: | echo "${{ steps.changed-files.outputs.diff }}" exit 1 @@ -333,11 +323,11 @@ jobs: async-migrations: name: Async migrations tests - ${{ matrix.clickhouse-server-image }} needs: changes - if: needs.changes.outputs.backend == 'true' strategy: fail-fast: false matrix: clickhouse-server-image: ['clickhouse/clickhouse-server:24.8.7.41'] + if: needs.changes.outputs.backend == 'true' runs-on: ubuntu-24.04 steps: - name: 'Checkout repo' @@ -391,7 +381,7 @@ jobs: calculate-running-time: name: Calculate running time - needs: [changes, django, async-migrations] + needs: [django, async-migrations] runs-on: ubuntu-24.04 if: # Run on pull requests to PostHog/posthog + on PostHog/posthog outside of PRs - but never on forks needs.changes.outputs.backend == 'true' && diff --git a/.github/workflows/ci-e2e.yml b/.github/workflows/ci-e2e.yml index ada720bb48862..4ec28981e5f1d 100644 --- a/.github/workflows/ci-e2e.yml +++ b/.github/workflows/ci-e2e.yml @@ -68,7 +68,6 @@ jobs: container: name: Build and cache container image runs-on: ubuntu-24.04 - if: needs.changes.outputs.shouldTriggerCypress == 'true' timeout-minutes: 60 needs: [changes] permissions: @@ -79,9 +78,12 @@ jobs: build-id: ${{ steps.build.outputs.build-id }} steps: - name: Checkout + if: needs.changes.outputs.shouldTriggerCypress == 'true' uses: actions/checkout@v3 - name: Build the Docker image with Depot - uses: ./.github/actions/build-n-cache-image # Build the container image in preparation for the E2E tests + if: needs.changes.outputs.shouldTriggerCypress == 'true' + # Build the container image in preparation for the E2E tests + uses: ./.github/actions/build-n-cache-image id: build with: save: true @@ -166,8 +168,7 @@ jobs: build-id: ${{ needs.container.outputs.build-id }} tags: ${{ needs.container.outputs.tag }} - - name: Write .env - if: needs.changes.outputs.shouldTriggerCypress == 'true' + - name: Write .env # This step intentionally has no if, so that GH always considers the action as having run run: | cat <<EOT >> .env SECRET_KEY=6b01eee4f945ca25045b5aab440b953461faf08693a9abbf1166dc7c6b9772da @@ -279,7 +280,7 @@ jobs: calculate-running-time: name: Calculate running time runs-on: ubuntu-24.04 - needs: [changes, cypress] + needs: [cypress] if: needs.changes.outputs.shouldTriggerCypress == 'true' && github.event.pull_request.head.repo.full_name == 'PostHog/posthog' steps: diff --git a/.github/workflows/ci-frontend.yml b/.github/workflows/ci-frontend.yml index eedc30ca91010..f59c7e8eef790 100644 --- a/.github/workflows/ci-frontend.yml +++ b/.github/workflows/ci-frontend.yml @@ -52,13 +52,12 @@ jobs: frontend-code-quality: name: Code quality checks - # if: needs.changes.outputs.frontend == 'true' # <- Can't add this here because it's a required check needs: changes # kea typegen and typescript:check need some more oomph runs-on: ubuntu-24.04 steps: + # we need at least one thing to run to make sure we include everything for required jobs - uses: actions/checkout@v3 - if: needs.changes.outputs.frontend == 'true' - name: Install pnpm if:
needs.changes.outputs.frontend == 'true' @@ -124,7 +123,6 @@ jobs: jest: runs-on: ubuntu-24.04 needs: changes - # if: needs.changes.outputs.frontend == 'true' # <- Can't add this here because it's a required check name: Jest test (${{ matrix.segment }} - ${{ matrix.chunk }}) strategy: @@ -135,8 +133,8 @@ jobs: chunk: [1, 2, 3] steps: + # we need at least one thing to run to make sure we include everything for required jobs - uses: actions/checkout@v3 - if: needs.changes.outputs.frontend == 'true' - name: Remove ee if: needs.changes.outputs.frontend == 'true' && matrix.segment == 'FOSS' diff --git a/.github/workflows/ci-hog.yml b/.github/workflows/ci-hog.yml index 2d9f67dc27ebb..ea51f70721f5c 100644 --- a/.github/workflows/ci-hog.yml +++ b/.github/workflows/ci-hog.yml @@ -11,12 +11,10 @@ on: paths-ignore: - rust/** - livestream/** - - .github/** pull_request: paths-ignore: - rust/** - livestream/** - - .github/** jobs: # Job to decide if we should run backend ci @@ -49,11 +47,10 @@ jobs: hog-tests: needs: changes - if: needs.changes.outputs.hog == 'true' timeout-minutes: 30 - name: Hog tests runs-on: ubuntu-24.04 + if: needs.changes.outputs.hog == 'true' steps: # If this run wasn't initiated by the bot (meaning: snapshot update) and we've determined @@ -93,7 +90,7 @@ jobs: - name: Set up Node.js uses: actions/setup-node@v4 with: - node-version: 18.12.1 + node-version: 18 - name: Check if ANTLR definitions are up to date run: | @@ -138,3 +135,155 @@ jobs: pnpm run build cd ../ ./test.sh && git diff --exit-code + + check-package-version: + name: Check HogVM TypeScript package version and detect an update + needs: hog-tests + if: needs.hog-tests.result == 'success' && needs.changes.outputs.hog == 'true' + runs-on: ubuntu-24.04 + outputs: + committed-version: ${{ steps.check-package-version.outputs.committed-version }} + published-version: ${{ steps.check-package-version.outputs.published-version }} + is-new-version: ${{ steps.check-package-version.outputs.is-new-version }} + steps: + - name: Checkout the repository + uses: actions/checkout@v2 + - name: Check package version and detect an update + id: check-package-version + uses: PostHog/check-package-version@v2 + with: + path: hogvm/typescript + + release-hogvm: + name: Release new HogVM TypeScript version + runs-on: ubuntu-24.04 + needs: check-package-version + if: needs.changes.outputs.hog == 'true' && needs.check-package-version.outputs.is-new-version == 'true' + env: + COMMITTED_VERSION: ${{ needs.check-package-version.outputs.committed-version }} + PUBLISHED_VERSION: ${{ needs.check-package-version.outputs.published-version }} + steps: + - name: Checkout the repository + uses: actions/checkout@v4 + with: + fetch-depth: 1 + token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.11.9 + cache: 'pip' + cache-dependency-path: '**/requirements*.txt' + token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} + - run: pip install uv + - name: Install SAML (python3-saml) dependencies + run: | + sudo apt-get update + sudo apt-get install libxml2-dev libxmlsec1 libxmlsec1-dev libxmlsec1-openssl + - name: Install Python dependencies + run: | + uv pip install --system -r requirements.txt -r requirements-dev.txt + - name: Install pnpm + uses: pnpm/action-setup@v4 + - name: Set up Node 18 + uses: actions/setup-node@v4 + with: + node-version: 18 + registry-url: https://registry.npmjs.org + - name: Install package.json dependencies + run: cd hogvm/typescript && pnpm install + - name: 
Publish the package in the npm registry + run: cd hogvm/typescript && npm publish --access public + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + - name: Sleep 60 seconds to allow npm to update the package + run: sleep 60 + + update-versions: + name: Update versions in package.json + runs-on: ubuntu-24.04 + needs: release-hogvm + if: always() # This ensures the job runs regardless of the result of release-hogvm + steps: + - name: Checkout the repository + uses: actions/checkout@v4 + with: + fetch-depth: 1 + repository: ${{ github.event.pull_request.head.repo.full_name }} + ref: ${{ github.event.pull_request.head.ref }} + token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} + + - name: Install pnpm + uses: pnpm/action-setup@v4 + - name: Set up Node 18 + uses: actions/setup-node@v4 + with: + node-version: 18 + registry-url: https://registry.npmjs.org + + - name: Check for version mismatches + id: check-mismatch + run: | + # Extract committed version + HOGVM_VERSION=$(jq -r '.version' hogvm/typescript/package.json) + + # Compare dependencies in package.json + MAIN_VERSION=$(jq -r '.dependencies."@posthog/hogvm"' package.json | tr -d '^') + PLUGIN_VERSION=$(jq -r '.dependencies."@posthog/hogvm"' plugin-server/package.json | tr -d '^') + + echo "HOGVM_VERSION=$HOGVM_VERSION" + echo "MAIN_VERSION=$MAIN_VERSION" + echo "PLUGIN_VERSION=$PLUGIN_VERSION" + + # Set env.mismatch (via $GITHUB_ENV) so the follow-up steps can tell if mismatches exist + if [[ "$HOGVM_VERSION" != "$MAIN_VERSION" || "$HOGVM_VERSION" != "$PLUGIN_VERSION" ]]; then + echo "mismatch=true" >> "$GITHUB_ENV" + else + echo "mismatch=false" >> "$GITHUB_ENV" + fi + + - name: Update package.json versions + if: env.mismatch == 'true' + run: | + VERSION=$(jq ".version" hogvm/typescript/package.json -r) + + retry_pnpm_install() { + local retries=0 + local max_retries=20 # 10 minutes total + local delay=30 + + while [[ $retries -lt $max_retries ]]; do + echo "Attempting pnpm install (retry $((retries+1))/$max_retries)..." + pnpm install --no-frozen-lockfile && break + echo "Install failed. Retrying in $delay seconds..." + sleep $delay + retries=$((retries + 1)) + done + + if [[ $retries -eq $max_retries ]]; then + echo "pnpm install failed after $max_retries attempts."
+ exit 1 + fi + } + + # Update main package.json + mv package.json package.old.json + jq --indent 4 '.dependencies."@posthog/hogvm" = "^'$VERSION'"' package.old.json > package.json + rm package.old.json + retry_pnpm_install + + # Update plugin-server/package.json + cd plugin-server + mv package.json package.old.json + jq --indent 4 '.dependencies."@posthog/hogvm" = "^'$VERSION'"' package.old.json > package.json + rm package.old.json + retry_pnpm_install + + - name: Commit updated package.json files + if: env.mismatch == 'true' + uses: EndBug/add-and-commit@v9 + with: + add: '["package.json", "pnpm-lock.yaml", "plugin-server/package.json", "plugin-server/pnpm-lock.yaml", "hogvm/typescript/package.json"]' + message: 'Update @posthog/hogvm version in package.json' + default_author: github_actions + github_token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} diff --git a/.github/workflows/ci-plugin-server.yml b/.github/workflows/ci-plugin-server.yml index 814550ddf9d80..30ca845cd89b6 100644 --- a/.github/workflows/ci-plugin-server.yml +++ b/.github/workflows/ci-plugin-server.yml @@ -81,7 +81,6 @@ jobs: tests: name: Plugin Server Tests (${{matrix.shard}}) needs: changes - # if: needs.changes.outputs.plugin-server == 'true' # <- Can't add this here because it's a required check runs-on: ubuntu-24.04 strategy: @@ -97,6 +96,7 @@ jobs: steps: - name: Code check out + # NOTE: We need this check on every step so that it still runs if skipped, as we need it to succeed for the CI if: needs.changes.outputs.plugin-server == 'true' uses: actions/checkout@v3 @@ -121,14 +121,12 @@ jobs: # uv is a fast pip alternative: https://github.com/astral-sh/uv/ - run: pip install uv - if: needs.changes.outputs.plugin-server == 'true' - name: Install rust if: needs.changes.outputs.plugin-server == 'true' uses: dtolnay/rust-toolchain@1.82 - uses: actions/cache@v4 - if: needs.changes.outputs.plugin-server == 'true' with: path: | ~/.cargo/registry @@ -195,7 +193,6 @@ jobs: functional-tests: name: Functional tests needs: changes - # if: needs.changes.outputs.plugin-server == 'true' # <- Can't add this here because it's a required check runs-on: ubuntu-24.04 env: @@ -233,7 +230,6 @@ jobs: # uv is a fast pip alternative: https://github.com/astral-sh/uv/ - run: pip install uv - if: needs.changes.outputs.plugin-server == 'true' - name: Install SAML (python3-saml) dependencies if: needs.changes.outputs.plugin-server == 'true' diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 7f1aef10ba0d7..56f1ed3bf0330 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -16,12 +16,10 @@ on: branches: ['master'] paths-ignore: - 'rust/**' - - '.github/**' pull_request: branches: ['master'] paths-ignore: - 'rust/**' - - '.github/**' schedule: - cron: '27 1 * * 0' diff --git a/.github/workflows/container-images-ci.yml b/.github/workflows/container-images-ci.yml index f696fe671bfc5..7b434a7cb546d 100644 --- a/.github/workflows/container-images-ci.yml +++ b/.github/workflows/container-images-ci.yml @@ -5,7 +5,6 @@ on: paths-ignore: - 'rust/**' - 'livestream/**' - - '.github/**' concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} diff --git a/.github/workflows/release-hogvm.yml b/.github/workflows/release-hogvm.yml deleted file mode 100644 index 6c8480202b24d..0000000000000 --- a/.github/workflows/release-hogvm.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: 'Publish HogVM package' - -on: - push: - branches: - - main - - master - -jobs: - release: - name: Publish - runs-on:
ubuntu-24.04 - defaults: - run: - working-directory: hogvm/typescript - steps: - - name: Checkout the repository - uses: actions/checkout@v4 - - - name: Check package version and detect an update - id: check-package-version - uses: PostHog/check-package-version@v2 - with: - path: hogvm/typescript - - - name: Install pnpm - uses: pnpm/action-setup@v4 - - - name: Set up Node - uses: actions/setup-node@v4 - if: steps.check-package-version.outputs.is-new-version == 'true' - with: - node-version: 18.12.1 - registry-url: https://registry.npmjs.org - cache: pnpm - cache-dependency-path: hogvm/typescript/pnpm-lock.yaml - - - name: Install dependencies - if: steps.check-package-version.outputs.is-new-version == 'true' - run: pnpm i --frozen-lockfile - - - name: Build - if: steps.check-package-version.outputs.is-new-version == 'true' - run: pnpm build - - - name: Publish the package in the npm registry - id: publish-package - if: steps.check-package-version.outputs.is-new-version == 'true' - run: | - pnpm publish --access public --tag latest - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.vscode/launch.json b/.vscode/launch.json index 3aa83e0bb4e0d..9eb3fe62780f2 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -106,6 +106,30 @@ "group": "main" } }, + { + "name": "Celery Beat", + "consoleName": "Celery Beat", + "type": "debugpy", + "justMyCode": true, + "autoReload": { + "enable": true, + "include": ["posthog/**/*.py"] + }, + "request": "launch", + "program": "${workspaceFolder}/manage.py", + "args": ["run_autoreload_celery", "--type=beat"], + "console": "integratedTerminal", + "cwd": "${workspaceFolder}", + "env": { + "SKIP_ASYNC_MIGRATIONS_SETUP": "1", + "DEBUG": "1", + "BILLING_SERVICE_URL": "https://billing.dev.posthog.dev", + "SKIP_SERVICE_VERSION_REQUIREMENTS": "1" + }, + "presentation": { + "group": "main" + } + }, { "name": "Plugin Server", "command": "npm run start:dev", @@ -200,6 +224,27 @@ "console": "integratedTerminal", "internalConsoleOptions": "neverOpen", "runtimeExecutable": "${env:HOME}/.nvm/versions/node/${input:pickVersion}/bin/node" + }, + { + "name": "Python Debugger: Django Migrations", + "type": "debugpy", + "request": "launch", + "args": ["migrate", "posthog"], + "django": true, + "env": { + "PYTHONUNBUFFERED": "1", + "DJANGO_SETTINGS_MODULE": "posthog.settings", + "DEBUG": "1", + "CLICKHOUSE_SECURE": "False", + "KAFKA_HOSTS": "localhost", + "DATABASE_URL": "postgres://posthog:posthog@localhost:5432/posthog", + "SKIP_SERVICE_VERSION_REQUIREMENTS": "1", + "PRINT_SQL": "1", + "BILLING_SERVICE_URL": "http://localhost:8100/", + "CLOUD_DEPLOYMENT": "dev" + }, + "autoStartBrowser": false, + "program": "${workspaceFolder}/manage.py" } ], "inputs": [ @@ -212,7 +257,14 @@ "compounds": [ { "name": "PostHog", - "configurations": ["Backend", "Celery Threaded Pool", "Frontend", "Plugin Server", "Temporal Worker"], + "configurations": [ + "Backend", + "Celery Threaded Pool", + "Celery Beat", + "Frontend", + "Plugin Server", + "Temporal Worker" + ], "stopAll": true, "presentation": { "order": 1, diff --git a/cypress/README.md b/cypress/README.md index 0930a0ad7bcbf..f043f6f0546b4 100644 --- a/cypress/README.md +++ b/cypress/README.md @@ -2,17 +2,13 @@ The Cypress tests run with a PostHog instance that has no feature flags set up. 
-To test feature flags you can intercept the call to the `decide` endpoint +To test feature flags you can intercept the call to the `decide` endpoint using this helper ```javascript // sometimes the system under test calls `/decide` // and sometimes it calls https://app.posthog.com/decide -cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - // add feature flags here, for e.g. - // 'feature-flag-key': true, - }) - ) -) +setupFeatureFlags({ + // add feature flags here, for e.g. + 'feature-flag-key': true, +}) ``` diff --git a/cypress/e2e/alerts.cy.ts b/cypress/e2e/alerts.cy.ts index 82bd6bc10f4fb..91ecad1d24489 100644 --- a/cypress/e2e/alerts.cy.ts +++ b/cypress/e2e/alerts.cy.ts @@ -1,15 +1,11 @@ -import { decideResponse } from '../fixtures/api/decide' import { createInsight, createInsightWithBreakdown } from '../productAnalytics' +import { setupFeatureFlags } from '../support/decide' describe('Alerts', () => { beforeEach(() => { - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - alerts: true, - }) - ) - ) + setupFeatureFlags({ + alerts: true, + }) createInsight('insight') }) diff --git a/cypress/e2e/dashboard.cy.ts b/cypress/e2e/dashboard.cy.ts index b5f62097ebee1..cefbb60b6fe69 100644 --- a/cypress/e2e/dashboard.cy.ts +++ b/cypress/e2e/dashboard.cy.ts @@ -379,6 +379,8 @@ describe('Dashboard', () => { cy.get('[data-attr="date-filter"]').click() cy.contains('span', 'Last 14 days').click() + cy.wait(2000) + // insight meta should be updated to show new date range cy.get('h5').contains('Last 14 days').should('exist') diff --git a/cypress/e2e/experiments.cy.ts b/cypress/e2e/experiments.cy.ts index 5a7d92c3f49c1..a635cf7841cad 100644 --- a/cypress/e2e/experiments.cy.ts +++ b/cypress/e2e/experiments.cy.ts @@ -1,4 +1,4 @@ -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' describe('Experiments', () => { let randomNum @@ -47,13 +47,10 @@ describe('Experiments', () => { }) const createExperimentInNewUi = (): void => { - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - 'new-experiments-ui': true, - }) - ) - ) + setupFeatureFlags({ + 'new-experiments-ui': true, + }) + cy.visit('/experiments') // Name, flag key, description diff --git a/cypress/e2e/exports.cy.ts b/cypress/e2e/exports.cy.ts index 7e96b0c56d454..8131a98425602 100644 --- a/cypress/e2e/exports.cy.ts +++ b/cypress/e2e/exports.cy.ts @@ -1,18 +1,14 @@ import { urls } from 'scenes/urls' -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' // NOTE: As the API data is randomly generated, we are only really testing here that the overall output is correct // The actual graph is not under test describe('Exporting Insights', () => { beforeEach(() => { - cy.intercept('https://us.i.posthog.com/decide/*', (req) => - req.reply( - decideResponse({ - 'export-dashboard-insights': true, - }) - ) - ) + setupFeatureFlags({ + 'export-dashboard-insights': true, + }) cy.visit(urls.insightNew()) // apply filter cy.get('[data-attr$=add-filter-group]').click() diff --git a/cypress/e2e/featureFlags.cy.ts b/cypress/e2e/featureFlags.cy.ts index 2dceb97af6b21..df4d740b8ec4b 100644 --- a/cypress/e2e/featureFlags.cy.ts +++ b/cypress/e2e/featureFlags.cy.ts @@ -1,10 +1,10 @@ -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' describe('Feature Flags', () => { let name beforeEach(() => { - cy.intercept('**/decide/*', (req) => 
req.reply(decideResponse({}))) + setupFeatureFlags({}) cy.intercept('/api/projects/*/property_definitions?type=person*', { fixture: 'api/feature-flags/property_definition', diff --git a/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts b/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts index 85f472a09c97b..a902e861bacd6 100644 --- a/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts +++ b/cypress/e2e/insights-navigation-open-sql-insight-first.cy.ts @@ -56,6 +56,9 @@ describe('Insights', () => { it('can open a new stickiness insight', () => { insight.clickTab('STICKINESS') + // this test flaps, so check for a parent element that is present even when failing + // in the hope that it slows the test down a little and stops it flapping + cy.get('.InsightVizDisplay--type-stickiness').should('exist') cy.get('.TrendsInsight canvas').should('exist') }) diff --git a/cypress/e2e/onboarding.cy.ts b/cypress/e2e/onboarding.cy.ts index b9453689a12aa..3ffd5ccc4bc27 100644 --- a/cypress/e2e/onboarding.cy.ts +++ b/cypress/e2e/onboarding.cy.ts @@ -1,16 +1,11 @@ -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' describe('Onboarding', () => { beforeEach(() => { cy.intercept('/api/billing/', { fixture: 'api/billing/billing-unsubscribed.json' }) - - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - 'product-intro-pages': 'test', - }) - ) - ) + setupFeatureFlags({ + 'product-intro-pages': 'test', + }) }) it('Navigate between /products to /onboarding to a product intro page', () => { diff --git a/cypress/e2e/signup.cy.ts b/cypress/e2e/signup.cy.ts index 9774236ef81c4..76d7a694d8c50 100644 --- a/cypress/e2e/signup.cy.ts +++ b/cypress/e2e/signup.cy.ts @@ -1,4 +1,4 @@ -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from '../support/decide' const VALID_PASSWORD = 'hedgE-hog-123%' @@ -171,13 +171,9 @@ describe('Signup', () => { }) it('Shows redirect notice if redirecting for maintenance', () => { - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - 'redirect-signups-to-instance': 'us', - }) - ) - ) + setupFeatureFlags({ + 'redirect-signups-to-instance': 'us', + }) cy.visit('/logout') cy.location('pathname').should('include', '/login') diff --git a/cypress/fixtures/api/decide.js b/cypress/fixtures/api/decide.js index 102f1211152c1..7c03b11c6dc48 100644 --- a/cypress/fixtures/api/decide.js +++ b/cypress/fixtures/api/decide.js @@ -8,6 +8,7 @@ export function decideResponse(featureFlags) { }, isAuthenticated: true, supportedCompression: ['gzip', 'gzip-js', 'lz64'], + hasFeatureFlags: Object.keys(featureFlags).length > 0, featureFlags, sessionRecording: { endpoint: '/s/', diff --git a/cypress/support/decide.ts b/cypress/support/decide.ts new file mode 100644 index 0000000000000..a32e192f74d25 --- /dev/null +++ b/cypress/support/decide.ts @@ -0,0 +1,28 @@ +import { decideResponse } from '../fixtures/api/decide' + +export const setupFeatureFlags = (overrides: Record<string, any> = {}): void => { + // Tricky - the new RemoteConfig endpoint is optimised to not load decide if there are no feature flags in the DB. + // We need to intercept both the RemoteConfig and the decide endpoint to ensure that the feature flags are always loaded.
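+ // Concretely, per the intercepts below: `/array/*/config` is answered with the decideResponse payload (whose `hasFeatureFlags` field reflects whether any overrides were passed), and `/decide/*` is answered with the flag values themselves, so both routes must be stubbed for the overrides to take effect.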
+ + cy.intercept('**/array/*/config', (req) => + req.reply( + decideResponse({ + ...overrides, + }) + ) + ) + + cy.intercept('**/array/*/config.js', (req) => + req.continue((res) => { + res.send(res.body) + }) + ) + + cy.intercept('**/decide/*', (req) => + req.reply( + decideResponse({ + ...overrides, + }) + ) + ) +} diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts index fe164bf074b3a..f05a549c9bf30 100644 --- a/cypress/support/e2e.ts +++ b/cypress/support/e2e.ts @@ -4,7 +4,7 @@ import 'cypress-axe' import { urls } from 'scenes/urls' -import { decideResponse } from '../fixtures/api/decide' +import { setupFeatureFlags } from './decide' try { // eslint-disable-next-line @typescript-eslint/no-var-requires @@ -86,14 +86,7 @@ beforeEach(() => { Cypress.env('POSTHOG_PROPERTY_GITHUB_ACTION_RUN_URL', process.env.GITHUB_ACTION_RUN_URL) cy.useSubscriptionStatus('subscribed') - cy.intercept('**/decide/*', (req) => - req.reply( - decideResponse({ - // Feature flag to be treated as rolled out in E2E tests, e.g.: - // 'toolbar-launch-side-action': true, - }) - ) - ) + setupFeatureFlags({}) // un-intercepted sometimes this doesn't work and the page gets stuck on the SpinnerOverlay cy.intercept(/app.posthog.com\/api\/projects\/@current\/feature_flags\/my_flags.*/, (req) => req.reply([])) diff --git a/ee/api/conversation.py b/ee/api/conversation.py new file mode 100644 index 0000000000000..70e314b94039f --- /dev/null +++ b/ee/api/conversation.py @@ -0,0 +1,69 @@ +from typing import cast + +from django.http import StreamingHttpResponse +from pydantic import ValidationError +from rest_framework import serializers +from rest_framework.renderers import BaseRenderer +from rest_framework.request import Request +from rest_framework.viewsets import GenericViewSet + +from ee.hogai.assistant import Assistant +from ee.models.assistant import Conversation +from posthog.api.routing import TeamAndOrgViewSetMixin +from posthog.models.user import User +from posthog.rate_limit import AIBurstRateThrottle, AISustainedRateThrottle +from posthog.schema import HumanMessage + + +class MessageSerializer(serializers.Serializer): + content = serializers.CharField(required=True, max_length=1000) + conversation = serializers.UUIDField(required=False) + + def validate(self, data): + try: + message = HumanMessage(content=data["content"]) + data["message"] = message + except ValidationError: + raise serializers.ValidationError("Invalid message content.") + return data + + +class ServerSentEventRenderer(BaseRenderer): + media_type = "text/event-stream" + format = "txt" + + def render(self, data, accepted_media_type=None, renderer_context=None): + return data + + +class ConversationViewSet(TeamAndOrgViewSetMixin, GenericViewSet): + scope_object = "INTERNAL" + serializer_class = MessageSerializer + renderer_classes = [ServerSentEventRenderer] + queryset = Conversation.objects.all() + lookup_url_kwarg = "conversation" + + def safely_get_queryset(self, queryset): + # Only allow access to conversations created by the current user + return queryset.filter(user=self.request.user) + + def get_throttles(self): + return [AIBurstRateThrottle(), AISustainedRateThrottle()] + + def create(self, request: Request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + conversation_id = serializer.validated_data.get("conversation") + if conversation_id: + self.kwargs[self.lookup_url_kwarg] = conversation_id + conversation = self.get_object() + else: + conversation = 
self.get_queryset().create(user=request.user, team=self.team) + assistant = Assistant( + self.team, + conversation, + serializer.validated_data["message"], + user=cast(User, request.user), + is_new_conversation=not conversation_id, + ) + return StreamingHttpResponse(assistant.stream(), content_type=ServerSentEventRenderer.media_type) diff --git a/ee/api/dashboard_collaborator.py b/ee/api/dashboard_collaborator.py index 1687bf5d83115..4cdd8833983cd 100644 --- a/ee/api/dashboard_collaborator.py +++ b/ee/api/dashboard_collaborator.py @@ -91,7 +91,7 @@ class DashboardCollaboratorViewSet( scope_object = "dashboard" permission_classes = [CanEditDashboardCollaborator] pagination_class = None - queryset = DashboardPrivilege.objects.select_related("dashboard").filter(user__is_active=True) + queryset = DashboardPrivilege.objects.select_related("dashboard", "dashboard__team").filter(user__is_active=True) lookup_field = "user__uuid" serializer_class = DashboardCollaboratorSerializer filter_rewrite_rules = {"project_id": "dashboard__team__project_id"} diff --git a/ee/api/hooks.py b/ee/api/hooks.py index 6dd6dfd85e5c3..22d54c4b7bf8e 100644 --- a/ee/api/hooks.py +++ b/ee/api/hooks.py @@ -23,6 +23,7 @@ def create_zapier_hog_function(hook: Hook, serializer_context: dict) -> HogFunct serializer = HogFunctionSerializer( data={ "template_id": template_zapier.id, + "type": "destination", "name": f"Zapier webhook for action {hook.resource_id}", "filters": {"actions": [{"id": str(hook.resource_id), "name": "", "type": "actions", "order": 0}]}, "inputs": { diff --git a/ee/api/test/test_conversation.py b/ee/api/test/test_conversation.py new file mode 100644 index 0000000000000..6eb466876dc01 --- /dev/null +++ b/ee/api/test/test_conversation.py @@ -0,0 +1,157 @@ +from unittest.mock import patch + +from rest_framework import status + +from ee.hogai.assistant import Assistant +from ee.models.assistant import Conversation +from posthog.models.team.team import Team +from posthog.models.user import User +from posthog.test.base import APIBaseTest + + +class TestConversation(APIBaseTest): + def setUp(self): + super().setUp() + self.other_team = Team.objects.create(organization=self.organization, name="other team") + self.other_user = User.objects.create_and_join( + organization=self.organization, + email="other@posthog.com", + password="password", + first_name="Other", + ) + + def _get_streaming_content(self, response): + return b"".join(response.streaming_content) + + def test_create_conversation(self): + with patch.object(Assistant, "_stream", return_value=["test response"]) as stream_mock: + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + {"content": "test query"}, + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(self._get_streaming_content(response), b"test response") + self.assertEqual(Conversation.objects.count(), 1) + conversation: Conversation = Conversation.objects.first() + self.assertEqual(conversation.user, self.user) + self.assertEqual(conversation.team, self.team) + stream_mock.assert_called_once() + + def test_add_message_to_existing_conversation(self): + with patch.object(Assistant, "_stream", return_value=["test response"]) as stream_mock: + conversation = Conversation.objects.create(user=self.user, team=self.team) + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + { + "conversation": str(conversation.id), + "content": "test query", + }, + ) + self.assertEqual(response.status_code, 
status.HTTP_200_OK) + self.assertEqual(self._get_streaming_content(response), b"test response") + self.assertEqual(Conversation.objects.count(), 1) + stream_mock.assert_called_once() + + def test_cant_access_other_users_conversation(self): + conversation = Conversation.objects.create(user=self.other_user, team=self.team) + + self.client.force_login(self.user) + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + {"conversation": conversation.id, "content": "test query"}, + ) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + def test_cant_access_other_teams_conversation(self): + conversation = Conversation.objects.create(user=self.user, team=self.other_team) + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + {"conversation": conversation.id, "content": "test query"}, + ) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + def test_invalid_message_format(self): + response = self.client.post("/api/environments/@current/conversations/") + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_rate_limit_burst(self): + # Create multiple requests to trigger burst rate limit + with patch.object(Assistant, "_stream", return_value=["test response"]): + for _ in range(11): # Assuming burst limit is less than this + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + {"content": "test query"}, + ) + self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS) + + def test_empty_content(self): + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + {"content": ""}, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_content_too_long(self): + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + {"content": "x" * 1001}, # Very long message + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_invalid_conversation_id(self): + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + { + "conversation": "not-a-valid-uuid", + "content": "test query", + }, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + def test_nonexistent_conversation(self): + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + { + "conversation": "12345678-1234-5678-1234-567812345678", + "content": "test query", + }, + ) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + def test_deleted_conversation(self): + # Create and then delete a conversation + conversation = Conversation.objects.create(user=self.user, team=self.team) + conversation_id = conversation.id + conversation.delete() + + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + { + "conversation": str(conversation_id), + "content": "test query", + }, + ) + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + + def test_unauthenticated_request(self): + self.client.logout() + response = self.client.post( + f"/api/environments/{self.team.id}/conversations/", + {"content": "test query"}, + ) + self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) + + def test_streaming_error_handling(self): + def raise_error(): + yield "some content" + raise Exception("Streaming error") + + with patch.object(Assistant, "_stream", side_effect=raise_error): + response = self.client.post( + 
f"/api/environments/{self.team.id}/conversations/", + {"content": "test query"}, + ) + with self.assertRaises(Exception) as context: + b"".join(response.streaming_content) + self.assertTrue("Streaming error" in str(context.exception)) diff --git a/ee/clickhouse/queries/event_query.py b/ee/clickhouse/queries/event_query.py index 3f7857fd9a970..64f08da69d6bf 100644 --- a/ee/clickhouse/queries/event_query.py +++ b/ee/clickhouse/queries/event_query.py @@ -7,7 +7,6 @@ from posthog.models.filters.path_filter import PathFilter from posthog.models.filters.properties_timeline_filter import PropertiesTimelineFilter from posthog.models.filters.retention_filter import RetentionFilter -from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter from posthog.models.filters.stickiness_filter import StickinessFilter from posthog.models.property import PropertyName from posthog.models.team import Team @@ -25,7 +24,6 @@ def __init__( PathFilter, RetentionFilter, StickinessFilter, - SessionRecordingsFilter, PropertiesTimelineFilter, ], team: Team, diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr index cfb948992d66d..983cdf00b5aa0 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr @@ -1,17 +1,5 @@ # serializer version: 1 # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, @@ -27,7 +15,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -106,19 +94,86 @@ GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.2 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 
2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones ''' /* user_id:0 request:_snapshot_ */ SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, @@ -134,6 +189,85 @@ OFFSET 0 ''' # --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), 
toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop + ''' +# --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.2 ''' /* user_id:0 request:_snapshot_ */ @@ -213,19 +347,181 @@ GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants + ''' + /* user_id:0 request:_snapshot_ */ + SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 + 
''' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop + ''' +# --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + 
max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + LEFT OUTER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 99999 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max(max_steps)) + GROUP BY prop + ''' +# --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation ''' /* user_id:0 request:_snapshot_ */ SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, @@ -241,7 +537,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 ''' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -276,13 +572,13 @@ min(latest_1) over (PARTITION by aggregation_target, prop ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , - if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop FROM (SELECT *, if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop FROM (SELECT e.timestamp as timestamp, - 
if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as aggregation_target, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$account_id'), ''), 'null'), '^"|"$', '') as aggregation_target, if(notEmpty(pdi.distinct_id), pdi.person_id, e.person_id) as person_id, if(event = '$pageview', 1, 0) as step_0, if(step_0 = 1, timestamp, null) as latest_0, @@ -320,34 +616,6 @@ GROUP BY prop ''' # --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 - ''' - /* user_id:0 request:_snapshot_ */ - SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, - count(*) as count - FROM events e - WHERE team_id = 99999 - AND event IN ['$pageleave', '$pageview'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.2 ''' /* user_id:0 request:_snapshot_ */ @@ -428,18 +696,6 @@ ''' # --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -458,7 +714,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -506,7 +762,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -526,7 +782,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -590,19 +846,71 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, 
now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = '$feature_flag_called' + AND (((isNull(replaceRegexpAll(JSONExtractRaw(e.properties, 'exclude'), '^"|"$', '')) + OR NOT JSONHas(e.properties, 'exclude'))) + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -619,7 +927,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -665,7 +973,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -683,7 +991,7 @@ OFFSET 0 ''' # --- -# name: 
ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 ''' /* user_id:0 request:_snapshot_ */ SELECT [now()] AS date, @@ -692,19 +1000,16 @@ LIMIT 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT [now()] AS date, + [0] AS total, + '' AS breakdown_value + LIMIT 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.1 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -721,7 +1026,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -767,7 +1072,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -785,7 +1090,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.4 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.3 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -847,19 +1152,69 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.4 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 07:00:00', 'US/Pacific')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 02:10:00', 'US/Pacific')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as 
breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'US/Pacific')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = '$feature_flag_called' + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + AND toTimeZone(timestamp, 'US/Pacific') >= toDateTime('2020-01-01 02:10:00', 'US/Pacific') + AND toTimeZone(timestamp, 'US/Pacific') <= toDateTime('2020-01-06 07:00:00', 'US/Pacific') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.1 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -877,7 +1232,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.1 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -924,7 +1279,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.3 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.2 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -942,6 +1297,68 @@ OFFSET 0 ''' # --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.3 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT 
count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 99999 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 99999 + AND event = '$feature_flag_called' + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.4 ''' /* user_id:0 request:_snapshot_ */ diff --git a/ee/hogai/assistant.py b/ee/hogai/assistant.py index 77b1c2c050008..3a296ba9ce7d6 100644 --- a/ee/hogai/assistant.py +++ b/ee/hogai/assistant.py @@ -1,9 +1,12 @@ +import json from collections.abc import AsyncGenerator, Generator, Iterator from functools import partial -from typing import Any, Literal, Optional, TypedDict, TypeGuard, Union +from typing import Any, Optional +from uuid import uuid4 from asgiref.sync import sync_to_async from langchain_core.messages import AIMessageChunk +from langchain_core.runnables.config import RunnableConfig from langfuse.callback import CallbackHandler from langgraph.graph.state import CompiledStateGraph from pydantic import BaseModel @@ -17,7 +20,19 @@ from ee.hogai.trends.nodes import ( TrendsGeneratorNode, ) -from ee.hogai.utils import AssistantNodeName, AssistantState, Conversation +from ee.hogai.utils.state import ( + GraphMessageUpdateTuple, + GraphTaskStartedUpdateTuple, + GraphValueUpdateTuple, + is_message_update, + is_state_update, + is_task_started_update, + is_value_update, + validate_state_update, + validate_value_update, +) +from ee.hogai.utils.types import AssistantNodeName, AssistantState, PartialAssistantState +from ee.models import Conversation from posthog.event_usage import report_user_action from posthog.models import Team, User from posthog.schema import ( @@ -40,42 +55,6 @@ langfuse_handler = None -def is_value_update(update: list[Any]) -> TypeGuard[tuple[Literal["values"], dict[AssistantNodeName, AssistantState]]]: - """ - Transition between nodes. - """ - return len(update) == 2 and update[0] == "updates" - - -class LangGraphState(TypedDict): - langgraph_node: AssistantNodeName - - -def is_message_update( - update: list[Any], -) -> TypeGuard[tuple[Literal["messages"], tuple[Union[AIMessageChunk, Any], LangGraphState]]]: - """ - Streaming of messages. Returns a partial state. 
- """ - return len(update) == 2 and update[0] == "messages" - - -def is_state_update(update: list[Any]) -> TypeGuard[tuple[Literal["updates"], AssistantState]]: - """ - Update of the state. - """ - return len(update) == 2 and update[0] == "values" - - -def is_task_started_update( - update: list[Any], -) -> TypeGuard[tuple[Literal["messages"], tuple[Union[AIMessageChunk, Any], LangGraphState]]]: - """ - Streaming of messages. Returns a partial state. - """ - return len(update) == 2 and update[0] == "debug" and update[1]["type"] == "task" - - VISUALIZATION_NODES: dict[AssistantNodeName, type[SchemaGeneratorNode]] = { AssistantNodeName.TRENDS_GENERATOR: TrendsGeneratorNode, AssistantNodeName.FUNNEL_GENERATOR: FunnelGeneratorNode, @@ -87,13 +66,25 @@ class Assistant: _graph: CompiledStateGraph _user: Optional[User] _conversation: Conversation + _latest_message: HumanMessage + _state: Optional[AssistantState] - def __init__(self, team: Team, conversation: Conversation, user: Optional[User] = None): + def __init__( + self, + team: Team, + conversation: Conversation, + new_message: HumanMessage, + user: Optional[User] = None, + is_new_conversation: bool = False, + ): self._team = team self._user = user self._conversation = conversation + self._latest_message = new_message.model_copy(deep=True, update={"id": str(uuid4())}) + self._is_new_conversation = is_new_conversation self._graph = AssistantGraph(team).compile_full_graph() self._chunks = AIMessageChunk(content="") + self._state = None def stream(self): if SERVER_GATEWAY_INTERFACE == "ASGI": @@ -110,15 +101,19 @@ async def _astream(self) -> AsyncGenerator[str, None]: break def _stream(self) -> Generator[str, None, None]: - callbacks = [langfuse_handler] if langfuse_handler else [] + state = self._init_or_update_state() + config = self._get_config() + generator: Iterator[Any] = self._graph.stream( - self._initial_state, - config={"recursion_limit": 24, "callbacks": callbacks}, - stream_mode=["messages", "values", "updates", "debug"], + state, config=config, stream_mode=["messages", "values", "updates", "debug"] ) - # Send a chunk to establish the connection avoiding the worker's timeout. - yield self._serialize_message(AssistantGenerationStatusEvent(type=AssistantGenerationStatusType.ACK)) + # Assign the conversation id to the client. + if self._is_new_conversation: + yield self._serialize_conversation() + + # Send the last message with the initialized id. + yield self._serialize_message(self._latest_message) try: last_viz_message = None @@ -127,7 +122,15 @@ def _stream(self) -> Generator[str, None, None]: if isinstance(message, VisualizationMessage): last_viz_message = message yield self._serialize_message(message) - self._report_conversation(last_viz_message) + + # Check if the assistant has requested help. 
+ state = self._graph.get_state(config) + if state.next: + yield self._serialize_message( + AssistantMessage(content=state.tasks[0].interrupts[0].value, id=str(uuid4())) + ) + else: + self._report_conversation_state(last_viz_message) except: # This is an unhandled error, so we just stop further generation at this point yield self._serialize_message(FailureMessage()) @@ -135,8 +138,34 @@ def _stream(self) -> Generator[str, None, None]: @property def _initial_state(self) -> AssistantState: - messages = [message.root for message in self._conversation.messages] - return {"messages": messages, "intermediate_steps": None, "plan": None} + return AssistantState(messages=[self._latest_message], start_id=self._latest_message.id) + + def _get_config(self) -> RunnableConfig: + callbacks = [langfuse_handler] if langfuse_handler else [] + config: RunnableConfig = { + "recursion_limit": 24, + "callbacks": callbacks, + "configurable": {"thread_id": self._conversation.id}, + } + return config + + def _init_or_update_state(self): + config = self._get_config() + snapshot = self._graph.get_state(config) + if snapshot.next: + saved_state = validate_state_update(snapshot.values) + self._state = saved_state + if saved_state.intermediate_steps: + intermediate_steps = saved_state.intermediate_steps.copy() + intermediate_steps[-1] = (intermediate_steps[-1][0], self._latest_message.content) + self._graph.update_state( + config, + PartialAssistantState(messages=[self._latest_message], intermediate_steps=intermediate_steps), + ) + return None + initial_state = self._initial_state + self._state = initial_state + return initial_state def _node_to_reasoning_message( self, node_name: AssistantNodeName, input: AssistantState @@ -152,7 +181,7 @@ def _node_to_reasoning_message( ): substeps: list[str] = [] if input: - if intermediate_steps := input.get("intermediate_steps"): + if intermediate_steps := input.intermediate_steps: for action, _ in intermediate_steps: match action.tool: case "retrieve_event_properties": @@ -178,42 +207,65 @@ def _node_to_reasoning_message( return None def _process_update(self, update: Any) -> BaseModel | None: - if is_value_update(update): - _, state_update = update + if is_state_update(update): + _, new_state = update + self._state = validate_state_update(new_state) + elif is_value_update(update) and (new_message := self._process_value_update(update)): + return new_message + elif is_message_update(update) and (new_message := self._process_message_update(update)): + return new_message + elif is_task_started_update(update) and (new_message := self._process_task_started_update(update)): + return new_message + return None - if AssistantNodeName.ROUTER in state_update and "messages" in state_update[AssistantNodeName.ROUTER]: - return state_update[AssistantNodeName.ROUTER]["messages"][0] - elif intersected_nodes := state_update.keys() & VISUALIZATION_NODES.keys(): - # Reset chunks when schema validation fails. - self._chunks = AIMessageChunk(content="") + def _process_value_update(self, update: GraphValueUpdateTuple) -> BaseModel | None: + _, maybe_state_update = update + state_update = validate_value_update(maybe_state_update) + + if node_val := state_update.get(AssistantNodeName.ROUTER): + if isinstance(node_val, PartialAssistantState) and node_val.messages: + return node_val.messages[0] + elif intersected_nodes := state_update.keys() & VISUALIZATION_NODES.keys(): + # Reset chunks when schema validation fails. 
+ self._chunks = AIMessageChunk(content="") - node_name = intersected_nodes.pop() - if "messages" in state_update[node_name]: - return state_update[node_name]["messages"][0] - elif state_update[node_name].get("intermediate_steps", []): - return AssistantGenerationStatusEvent(type=AssistantGenerationStatusType.GENERATION_ERROR) - elif AssistantNodeName.SUMMARIZER in state_update: + node_name = intersected_nodes.pop() + node_val = state_update[node_name] + if not isinstance(node_val, PartialAssistantState): + return None + if node_val.messages: + return node_val.messages[0] + elif node_val.intermediate_steps: + return AssistantGenerationStatusEvent(type=AssistantGenerationStatusType.GENERATION_ERROR) + elif node_val := state_update.get(AssistantNodeName.SUMMARIZER): + if isinstance(node_val, PartialAssistantState) and node_val.messages: self._chunks = AIMessageChunk(content="") - return state_update[AssistantNodeName.SUMMARIZER]["messages"][0] - elif is_message_update(update): - langchain_message, langgraph_state = update[1] - if isinstance(langchain_message, AIMessageChunk): - if langgraph_state["langgraph_node"] in VISUALIZATION_NODES.keys(): - self._chunks += langchain_message # type: ignore - parsed_message = VISUALIZATION_NODES[langgraph_state["langgraph_node"]].parse_output( - self._chunks.tool_calls[0]["args"] - ) - if parsed_message: - return VisualizationMessage(answer=parsed_message.query) - elif langgraph_state["langgraph_node"] == AssistantNodeName.SUMMARIZER: - self._chunks += langchain_message # type: ignore - return AssistantMessage(content=self._chunks.content) - elif is_task_started_update(update): - _, task_update = update - node_name = task_update["payload"]["name"] # type: ignore - node_input = task_update["payload"]["input"] # type: ignore - if reasoning_message := self._node_to_reasoning_message(node_name, node_input): - return reasoning_message + return node_val.messages[0] + + return None + + def _process_message_update(self, update: GraphMessageUpdateTuple) -> BaseModel | None: + langchain_message, langgraph_state = update[1] + if isinstance(langchain_message, AIMessageChunk): + if langgraph_state["langgraph_node"] in VISUALIZATION_NODES.keys(): + self._chunks += langchain_message # type: ignore + parsed_message = VISUALIZATION_NODES[langgraph_state["langgraph_node"]].parse_output( + self._chunks.tool_calls[0]["args"] + ) + if parsed_message: + initiator_id = self._state.start_id if self._state is not None else None + return VisualizationMessage(answer=parsed_message.query, initiator=initiator_id) + elif langgraph_state["langgraph_node"] == AssistantNodeName.SUMMARIZER: + self._chunks += langchain_message # type: ignore + return AssistantMessage(content=self._chunks.content) + return None + + def _process_task_started_update(self, update: GraphTaskStartedUpdateTuple) -> BaseModel | None: + _, task_update = update + node_name = task_update["payload"]["name"] # type: ignore + node_input = task_update["payload"]["input"] # type: ignore + if reasoning_message := self._node_to_reasoning_message(node_name, node_input): + return reasoning_message return None def _serialize_message(self, message: BaseModel) -> str: @@ -224,9 +276,15 @@ def _serialize_message(self, message: BaseModel) -> str: output += f"event: {AssistantEventType.MESSAGE}\n" return output + f"data: {message.model_dump_json(exclude_none=True)}\n\n" - def _report_conversation(self, message: Optional[VisualizationMessage]): - human_message = self._conversation.messages[-1].root - if self._user and message and 
isinstance(human_message, HumanMessage): + def _serialize_conversation(self) -> str: + output = f"event: {AssistantEventType.CONVERSATION}\n" + json_conversation = json.dumps({"id": str(self._conversation.id)}) + output += f"data: {json_conversation}\n\n" + return output + + def _report_conversation_state(self, message: Optional[VisualizationMessage]): + human_message = self._latest_message + if self._user and message: report_user_action( self._user, "chat with ai", diff --git a/ee/hogai/django_checkpoint/__init__.py b/ee/hogai/django_checkpoint/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/ee/hogai/django_checkpoint/checkpointer.py b/ee/hogai/django_checkpoint/checkpointer.py new file mode 100644 index 0000000000000..78817dca9df76 --- /dev/null +++ b/ee/hogai/django_checkpoint/checkpointer.py @@ -0,0 +1,309 @@ +import json +import random +import threading +from collections.abc import Iterable, Iterator, Sequence +from typing import Any, Optional, cast + +from django.db import transaction +from django.db.models import Q +from langchain_core.runnables import RunnableConfig +from langgraph.checkpoint.base import ( + WRITES_IDX_MAP, + BaseCheckpointSaver, + ChannelVersions, + Checkpoint, + CheckpointMetadata, + CheckpointTuple, + PendingWrite, + get_checkpoint_id, +) +from langgraph.checkpoint.serde.jsonplus import JsonPlusSerializer +from langgraph.checkpoint.serde.types import ChannelProtocol + +from ee.models.assistant import ConversationCheckpoint, ConversationCheckpointBlob, ConversationCheckpointWrite + + +class DjangoCheckpointer(BaseCheckpointSaver[str]): + jsonplus_serde = JsonPlusSerializer() + _lock: threading.Lock + + def __init__(self, *args): + super().__init__(*args) + self._lock = threading.Lock() + + def _load_writes(self, writes: Sequence[ConversationCheckpointWrite]) -> list[PendingWrite]: + return ( + [ + ( + str(checkpoint_write.task_id), + checkpoint_write.channel, + self.serde.loads_typed((checkpoint_write.type, checkpoint_write.blob)), + ) + for checkpoint_write in writes + if checkpoint_write.type is not None and checkpoint_write.blob is not None + ] + if writes + else [] + ) + + def _load_json(self, obj: Any): + return self.jsonplus_serde.loads(self.jsonplus_serde.dumps(obj)) + + def _dump_json(self, obj: Any) -> dict[str, Any]: + serialized_metadata = self.jsonplus_serde.dumps(obj) + # NOTE: we're using JSON serializer (not msgpack), so we need to remove null characters before writing + nulls_removed = serialized_metadata.decode().replace("\\u0000", "") + return json.loads(nulls_removed) + + def _get_checkpoint_qs( + self, + config: Optional[RunnableConfig], + filter: Optional[dict[str, Any]], + before: Optional[RunnableConfig], + ): + query = Q() + + # construct predicate for config filter + if config and "configurable" in config: + thread_id = config["configurable"].get("thread_id") + query &= Q(thread_id=thread_id) + checkpoint_ns = config["configurable"].get("checkpoint_ns") + if checkpoint_ns is not None: + query &= Q(checkpoint_ns=checkpoint_ns) + if checkpoint_id := get_checkpoint_id(config): + query &= Q(id=checkpoint_id) + + # construct predicate for metadata filter + if filter: + query &= Q(metadata__contains=filter) + + # construct predicate for `before` + if before is not None: + query &= Q(id__lt=get_checkpoint_id(before)) + + return ConversationCheckpoint.objects.filter(query).order_by("-id") + + def _get_checkpoint_channel_values( + self, checkpoint: ConversationCheckpoint + ) -> Iterable[ConversationCheckpointBlob]: 
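+        # [Editor's note: illustrative gloss, not part of the original change] Only blob rows whose (channel, version) pair matches this checkpoint's recorded `channel_versions` are returned, i.e. the channel values that were current when the checkpoint was taken.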
+ if not checkpoint.checkpoint: + return [] + loaded_checkpoint = self._load_json(checkpoint.checkpoint) + if "channel_versions" not in loaded_checkpoint: + return [] + query = Q() + for channel, version in loaded_checkpoint["channel_versions"].items(): + query |= Q(channel=channel, version=version) + return checkpoint.blobs.filter(query) + + def list( + self, + config: Optional[RunnableConfig], + *, + filter: Optional[dict[str, Any]] = None, + before: Optional[RunnableConfig] = None, + limit: Optional[int] = None, + ) -> Iterator[CheckpointTuple]: + """List checkpoints from the database. + + This method retrieves a list of checkpoint tuples from the Postgres database based + on the provided config. The checkpoints are ordered by checkpoint ID in descending order (newest first). + + Args: + config (RunnableConfig): The config to use for listing the checkpoints. + filter (Optional[Dict[str, Any]]): Additional filtering criteria for metadata. Defaults to None. + before (Optional[RunnableConfig]): If provided, only checkpoints before the specified checkpoint ID are returned. Defaults to None. + limit (Optional[int]): The maximum number of checkpoints to return. Defaults to None. + + Yields: + Iterator[CheckpointTuple]: An iterator of checkpoint tuples. + """ + qs = self._get_checkpoint_qs(config, filter, before) + if limit: + qs = qs[:limit] + + for checkpoint in qs: + channel_values = self._get_checkpoint_channel_values(checkpoint) + loaded_checkpoint: Checkpoint = self._load_json(checkpoint.checkpoint) + + checkpoint_dict: Checkpoint = { + **loaded_checkpoint, + "pending_sends": [ + self.serde.loads_typed((checkpoint_write.type, checkpoint_write.blob)) + for checkpoint_write in checkpoint.pending_sends + ], + "channel_values": { + checkpoint_blob.channel: self.serde.loads_typed((checkpoint_blob.type, checkpoint_blob.blob)) + for checkpoint_blob in channel_values + if checkpoint_blob.type is not None + and checkpoint_blob.type != "empty" + and checkpoint_blob.blob is not None + }, + } + + yield CheckpointTuple( + { + "configurable": { + "thread_id": checkpoint.thread_id, + "checkpoint_ns": checkpoint.checkpoint_ns, + "checkpoint_id": checkpoint.id, + } + }, + checkpoint_dict, + self._load_json(checkpoint.metadata), + ( + { + "configurable": { + "thread_id": checkpoint.thread_id, + "checkpoint_ns": checkpoint.checkpoint_ns, + "checkpoint_id": checkpoint.parent_checkpoint_id, + } + } + if checkpoint.parent_checkpoint + else None + ), + self._load_writes(checkpoint.pending_writes), + ) + + def get_tuple(self, config: RunnableConfig) -> Optional[CheckpointTuple]: + """Get a checkpoint tuple from the database. + + This method retrieves a checkpoint tuple from the Postgres database based on the + provided config. If the config contains a "checkpoint_id" key, the checkpoint with + the matching thread ID and timestamp is retrieved. Otherwise, the latest checkpoint + for the given thread ID is retrieved. + + Args: + config (RunnableConfig): The config to use for retrieving the checkpoint. + + Returns: + Optional[CheckpointTuple]: The retrieved checkpoint tuple, or None if no matching checkpoint was found. + """ + return next(self.list(config), None) + + def put( + self, + config: RunnableConfig, + checkpoint: Checkpoint, + metadata: CheckpointMetadata, + new_versions: ChannelVersions, + ) -> RunnableConfig: + """Save a checkpoint to the database. + + This method saves a checkpoint to the Postgres database. The checkpoint is associated + with the provided config and its parent config (if any). 
+ + Args: + config (RunnableConfig): The config to associate with the checkpoint. + checkpoint (Checkpoint): The checkpoint to save. + metadata (CheckpointMetadata): Additional metadata to save with the checkpoint. + new_versions (ChannelVersions): New channel versions as of this write. + + Returns: + RunnableConfig: Updated configuration after storing the checkpoint. + """ + configurable = config["configurable"] + thread_id: str = configurable["thread_id"] + checkpoint_id = get_checkpoint_id(config) + checkpoint_ns: str | None = configurable.get("checkpoint_ns") or "" + + checkpoint_copy = cast(dict[str, Any], checkpoint.copy()) + channel_values = checkpoint_copy.pop("channel_values", {}) + + next_config: RunnableConfig = { + "configurable": { + "thread_id": thread_id, + "checkpoint_ns": checkpoint_ns, + "checkpoint_id": checkpoint["id"], + } + } + + with self._lock, transaction.atomic(): + updated_checkpoint, _ = ConversationCheckpoint.objects.update_or_create( + id=checkpoint["id"], + thread_id=thread_id, + checkpoint_ns=checkpoint_ns, + defaults={ + "parent_checkpoint_id": checkpoint_id, + "checkpoint": self._dump_json({**checkpoint_copy, "pending_sends": []}), + "metadata": self._dump_json(metadata), + }, + ) + + blobs = [] + for channel, version in new_versions.items(): + type, blob = ( + self.serde.dumps_typed(channel_values[channel]) if channel in channel_values else ("empty", None) + ) + blobs.append( + ConversationCheckpointBlob( + checkpoint=updated_checkpoint, + channel=channel, + version=str(version), + type=type, + blob=blob, + ) + ) + + ConversationCheckpointBlob.objects.bulk_create(blobs, ignore_conflicts=True) + return next_config + + def put_writes( + self, + config: RunnableConfig, + writes: Sequence[tuple[str, Any]], + task_id: str, + ) -> None: + """Store intermediate writes linked to a checkpoint. + + This method saves intermediate writes associated with a checkpoint to the Postgres database. + + Args: + config (RunnableConfig): Configuration of the related checkpoint. + writes (List[Tuple[str, Any]]): List of writes to store. + task_id (str): Identifier for the task creating the writes. + """ + configurable = config["configurable"] + thread_id: str = configurable["thread_id"] + checkpoint_id = get_checkpoint_id(config) + checkpoint_ns: str | None = configurable.get("checkpoint_ns") or "" + + with self._lock, transaction.atomic(): + # `put_writes` and `put` are called concurrently without a guaranteed order, + # so we need to ensure the checkpoint exists before creating writes for it. + # Note that the threading.Lock only prevents races on the same checkpoint within a single pod.
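+            # [Editor's note: illustrative sketch of the hazard, not part of the original change] + # One interleaving that `get_or_create` below guards against: + #   t1: put_writes(checkpoint_id=X)  -> must not fail even if X doesn't exist yet + #   t2: put(checkpoint_id=X)         -> fills in the checkpoint payload afterwards + # so a stub ConversationCheckpoint row is created here for the writes to attach to, + # and `put` later completes it via update_or_create.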
+ checkpoint, _ = ConversationCheckpoint.objects.get_or_create( + id=checkpoint_id, thread_id=thread_id, checkpoint_ns=checkpoint_ns + ) + + writes_to_create = [] + for idx, (channel, value) in enumerate(writes): + type, blob = self.serde.dumps_typed(value) + writes_to_create.append( + ConversationCheckpointWrite( + checkpoint=checkpoint, + task_id=task_id, + idx=idx, + channel=channel, + type=type, + blob=blob, + ) + ) + + ConversationCheckpointWrite.objects.bulk_create( + writes_to_create, + update_conflicts=all(w[0] in WRITES_IDX_MAP for w in writes), + unique_fields=["checkpoint", "task_id", "idx"], + update_fields=["channel", "type", "blob"], + ) + + def get_next_version(self, current: Optional[str | int], channel: ChannelProtocol) -> str: + if current is None: + current_v = 0 + elif isinstance(current, int): + current_v = current + else: + current_v = int(current.split(".")[0]) + next_v = current_v + 1 + next_h = random.random() + return f"{next_v:032}.{next_h:016}" diff --git a/ee/hogai/django_checkpoint/test/test_checkpointer.py b/ee/hogai/django_checkpoint/test/test_checkpointer.py new file mode 100644 index 0000000000000..2f8fd7f4a60ed --- /dev/null +++ b/ee/hogai/django_checkpoint/test/test_checkpointer.py @@ -0,0 +1,274 @@ +# type: ignore + +from typing import Any, TypedDict + +from langchain_core.runnables import RunnableConfig +from langgraph.checkpoint.base import ( + Checkpoint, + CheckpointMetadata, + create_checkpoint, + empty_checkpoint, +) +from langgraph.checkpoint.base.id import uuid6 +from langgraph.errors import NodeInterrupt +from langgraph.graph import END, START +from langgraph.graph.state import CompiledStateGraph, StateGraph + +from ee.hogai.django_checkpoint.checkpointer import DjangoCheckpointer +from ee.models.assistant import ( + Conversation, + ConversationCheckpoint, + ConversationCheckpointBlob, + ConversationCheckpointWrite, +) +from posthog.test.base import NonAtomicBaseTest + + +class TestDjangoCheckpointer(NonAtomicBaseTest): + CLASS_DATA_LEVEL_SETUP = False + + def _build_graph(self, checkpointer: DjangoCheckpointer): + class State(TypedDict): + val: int + + graph = StateGraph(State) + + def handle_node1(state: State) -> State: + if state["val"] == 1: + raise NodeInterrupt("test") + return {"val": state["val"] + 1} + + graph.add_node("node1", handle_node1) + graph.add_node("node2", lambda state: state) + + graph.add_edge(START, "node1") + graph.add_edge("node1", "node2") + graph.add_edge("node2", END) + + return graph.compile(checkpointer=checkpointer) + + def test_saver(self): + thread1 = Conversation.objects.create(user=self.user, team=self.team) + thread2 = Conversation.objects.create(user=self.user, team=self.team) + + config_1: RunnableConfig = { + "configurable": { + "thread_id": thread1.id, + "checkpoint_ns": "", + } + } + chkpnt_1: Checkpoint = empty_checkpoint() + + config_2: RunnableConfig = { + "configurable": { + "thread_id": thread2.id, + "checkpoint_ns": "", + } + } + chkpnt_2: Checkpoint = create_checkpoint(chkpnt_1, {}, 1) + + config_3: RunnableConfig = { + "configurable": { + "thread_id": thread2.id, + "checkpoint_id": chkpnt_2["id"], + "checkpoint_ns": "inner", + } + } + chkpnt_3: Checkpoint = empty_checkpoint() + + metadata_1: CheckpointMetadata = { + "source": "input", + "step": 2, + "writes": {}, + "score": 1, + } + metadata_2: CheckpointMetadata = { + "source": "loop", + "step": 1, + "writes": {"foo": "bar"}, + "score": None, + } + metadata_3: CheckpointMetadata = {} + + test_data = { + "configs": [config_1, config_2, 
config_3], + "checkpoints": [chkpnt_1, chkpnt_2, chkpnt_3], + "metadata": [metadata_1, metadata_2, metadata_3], + } + + saver = DjangoCheckpointer() + + configs = test_data["configs"] + checkpoints = test_data["checkpoints"] + metadata = test_data["metadata"] + + saver.put(configs[0], checkpoints[0], metadata[0], {}) + saver.put(configs[1], checkpoints[1], metadata[1], {}) + saver.put(configs[2], checkpoints[2], metadata[2], {}) + + # call method / assertions + query_1 = {"source": "input"} # search by 1 key + query_2 = { + "step": 1, + "writes": {"foo": "bar"}, + } # search by multiple keys + query_3: dict[str, Any] = {} # search by no keys, return all checkpoints + query_4 = {"source": "update", "step": 1} # no match + + search_results_1 = list(saver.list(None, filter=query_1)) + assert len(search_results_1) == 1 + assert search_results_1[0].metadata == metadata[0] + + search_results_2 = list(saver.list(None, filter=query_2)) + assert len(search_results_2) == 1 + assert search_results_2[0].metadata == metadata[1] + + search_results_3 = list(saver.list(None, filter=query_3)) + assert len(search_results_3) == 3 + + search_results_4 = list(saver.list(None, filter=query_4)) + assert len(search_results_4) == 0 + + # search by config (defaults to checkpoints across all namespaces) + search_results_5 = list(saver.list({"configurable": {"thread_id": thread2.id}})) + assert len(search_results_5) == 2 + assert { + search_results_5[0].config["configurable"]["checkpoint_ns"], + search_results_5[1].config["configurable"]["checkpoint_ns"], + } == {"", "inner"} + + def test_channel_versions(self): + thread1 = Conversation.objects.create(user=self.user, team=self.team) + + chkpnt = { + "v": 1, + "ts": "2024-07-31T20:14:19.804150+00:00", + "id": str(uuid6(clock_seq=-2)), + "channel_values": { + "post": "hog", + "node": "node", + }, + "channel_versions": { + "__start__": 2, + "my_key": 3, + "start:node": 3, + "node": 3, + }, + "versions_seen": { + "__input__": {}, + "__start__": {"__start__": 1}, + "node": {"start:node": 2}, + }, + "pending_sends": [], + } + metadata = {"meta": "key"} + + write_config = {"configurable": {"thread_id": thread1.id, "checkpoint_ns": ""}} + read_config = {"configurable": {"thread_id": thread1.id}} + + saver = DjangoCheckpointer() + saver.put(write_config, chkpnt, metadata, {}) + + checkpoint = ConversationCheckpoint.objects.first() + self.assertIsNotNone(checkpoint) + self.assertEqual(checkpoint.thread, thread1) + self.assertEqual(checkpoint.checkpoint_ns, "") + self.assertEqual(str(checkpoint.id), chkpnt["id"]) + self.assertIsNone(checkpoint.parent_checkpoint) + chkpnt.pop("channel_values") + self.assertEqual(checkpoint.checkpoint, chkpnt) + self.assertEqual(checkpoint.metadata, metadata) + + checkpoints = list(saver.list(read_config)) + self.assertEqual(len(checkpoints), 1) + + checkpoint = saver.get(read_config) + self.assertEqual(checkpoint, checkpoints[0].checkpoint) + + def test_put_copies_checkpoint(self): + thread1 = Conversation.objects.create(user=self.user, team=self.team) + chkpnt = { + "v": 1, + "ts": "2024-07-31T20:14:19.804150+00:00", + "id": str(uuid6(clock_seq=-2)), + "channel_values": { + "post": "hog", + "node": "node", + }, + "channel_versions": { + "__start__": 2, + "my_key": 3, + "start:node": 3, + "node": 3, + }, + "versions_seen": { + "__input__": {}, + "__start__": {"__start__": 1}, + "node": {"start:node": 2}, + }, + "pending_sends": [], + } + metadata = {"meta": "key"} + write_config = {"configurable": {"thread_id": thread1.id, "checkpoint_ns": ""}} 
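+        # [Editor's note: illustrative gloss, not part of the original change] + # `put` works on checkpoint.copy() and pops "channel_values" from that copy only, + # so the caller's dict must still contain "channel_values" after the call, + # which is what this test asserts.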
+ saver = DjangoCheckpointer() + saver.put(write_config, chkpnt, metadata, {}) + self.assertIn("channel_values", chkpnt) + + def test_concurrent_puts_and_put_writes(self): + graph: CompiledStateGraph = self._build_graph(DjangoCheckpointer()) + thread = Conversation.objects.create(user=self.user, team=self.team) + config = {"configurable": {"thread_id": str(thread.id)}} + graph.invoke( + {"val": 0}, + config=config, + ) + self.assertEqual(len(ConversationCheckpoint.objects.all()), 4) + self.assertEqual(len(ConversationCheckpointBlob.objects.all()), 10) + self.assertEqual(len(ConversationCheckpointWrite.objects.all()), 6) + + def test_resuming(self): + checkpointer = DjangoCheckpointer() + graph: CompiledStateGraph = self._build_graph(checkpointer) + thread = Conversation.objects.create(user=self.user, team=self.team) + config = {"configurable": {"thread_id": str(thread.id)}} + + graph.invoke( + {"val": 1}, + config=config, + ) + snapshot = graph.get_state(config) + self.assertIsNotNone(snapshot.next) + self.assertEqual(snapshot.tasks[0].interrupts[0].value, "test") + + self.assertEqual(len(ConversationCheckpoint.objects.all()), 2) + self.assertEqual(len(ConversationCheckpointBlob.objects.all()), 4) + self.assertEqual(len(ConversationCheckpointWrite.objects.all()), 3) + self.assertEqual(len(list(checkpointer.list(config))), 2) + + latest_checkpoint = ConversationCheckpoint.objects.last() + latest_write = ConversationCheckpointWrite.objects.filter(checkpoint=latest_checkpoint).first() + actual_checkpoint = checkpointer.get_tuple(config) + self.assertIsNotNone(actual_checkpoint) + self.assertIsNotNone(latest_write) + self.assertEqual(len(latest_checkpoint.writes.all()), 1) + blobs = list(latest_checkpoint.blobs.all()) + self.assertEqual(len(blobs), 3) + self.assertEqual(actual_checkpoint.checkpoint["id"], str(latest_checkpoint.id)) + self.assertEqual(len(actual_checkpoint.pending_writes), 1) + self.assertEqual(actual_checkpoint.pending_writes[0][0], str(latest_write.task_id)) + + graph.update_state(config, {"val": 2}) + # add the value update checkpoint + self.assertEqual(len(ConversationCheckpoint.objects.all()), 3) + self.assertEqual(len(ConversationCheckpointBlob.objects.all()), 6) + self.assertEqual(len(ConversationCheckpointWrite.objects.all()), 5) + self.assertEqual(len(list(checkpointer.list(config))), 3) + + res = graph.invoke(None, config=config) + self.assertEqual(len(ConversationCheckpoint.objects.all()), 5) + self.assertEqual(len(ConversationCheckpointBlob.objects.all()), 12) + self.assertEqual(len(ConversationCheckpointWrite.objects.all()), 9) + self.assertEqual(len(list(checkpointer.list(config))), 5) + self.assertEqual(res, {"val": 3}) + snapshot = graph.get_state(config) + self.assertFalse(snapshot.next) diff --git a/ee/hogai/eval/tests/test_eval_funnel_generator.py b/ee/hogai/eval/tests/test_eval_funnel_generator.py index cd7e93b260ae9..4d7876ca6f73c 100644 --- a/ee/hogai/eval/tests/test_eval_funnel_generator.py +++ b/ee/hogai/eval/tests/test_eval_funnel_generator.py @@ -1,9 +1,11 @@ +from typing import cast + from langgraph.graph.state import CompiledStateGraph from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName -from posthog.schema import AssistantFunnelsQuery, HumanMessage +from ee.hogai.utils.types import AssistantNodeName, AssistantState +from posthog.schema import AssistantFunnelsQuery, HumanMessage, VisualizationMessage class TestEvalFunnelGenerator(EvalBaseTest): @@ -14,8 +16,11 @@ def 
_call_node(self, query: str, plan: str) -> AssistantFunnelsQuery: .add_funnel_generator(AssistantNodeName.END) .compile() ) - state = graph.invoke({"messages": [HumanMessage(content=query)], "plan": plan}) - return state["messages"][-1].answer + state = graph.invoke( + AssistantState(messages=[HumanMessage(content=query)], plan=plan), + self._get_config(), + ) + return cast(VisualizationMessage, AssistantState.model_validate(state).messages[-1]).answer def test_node_replaces_equals_with_contains(self): query = "what is the conversion rate from a page view to sign up for users with name John?" diff --git a/ee/hogai/eval/tests/test_eval_funnel_planner.py b/ee/hogai/eval/tests/test_eval_funnel_planner.py index 3760961f9bb03..9adbd75e77c6c 100644 --- a/ee/hogai/eval/tests/test_eval_funnel_planner.py +++ b/ee/hogai/eval/tests/test_eval_funnel_planner.py @@ -5,7 +5,7 @@ from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName +from ee.hogai.utils.types import AssistantNodeName, AssistantState from posthog.schema import HumanMessage @@ -40,8 +40,11 @@ def _call_node(self, query): .add_funnel_planner(AssistantNodeName.END) .compile() ) - state = graph.invoke({"messages": [HumanMessage(content=query)]}) - return state["plan"] + state = graph.invoke( + AssistantState(messages=[HumanMessage(content=query)]), + self._get_config(), + ) + return AssistantState.model_validate(state).plan or "" def test_basic_funnel(self): query = "what was the conversion from a page view to sign up?" diff --git a/ee/hogai/eval/tests/test_eval_router.py b/ee/hogai/eval/tests/test_eval_router.py index 25a84769dbfc8..c1307e9d40f00 100644 --- a/ee/hogai/eval/tests/test_eval_router.py +++ b/ee/hogai/eval/tests/test_eval_router.py @@ -1,8 +1,10 @@ +from typing import cast + from langgraph.graph.state import CompiledStateGraph from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName +from ee.hogai.utils.types import AssistantNodeName, AssistantState from posthog.schema import HumanMessage, RouterMessage @@ -15,8 +17,11 @@ def _call_node(self, query: str | list): .compile() ) messages = [HumanMessage(content=query)] if isinstance(query, str) else query - state = graph.invoke({"messages": messages}) - return state["messages"][-1].content + state = graph.invoke( + AssistantState(messages=messages), + self._get_config(), + ) + return cast(RouterMessage, AssistantState.model_validate(state).messages[-1]).content def test_outputs_basic_trends_insight(self): query = "Show the $pageview trend" diff --git a/ee/hogai/eval/tests/test_eval_trends_generator.py b/ee/hogai/eval/tests/test_eval_trends_generator.py index c5341584ca2f7..496bbf0100b51 100644 --- a/ee/hogai/eval/tests/test_eval_trends_generator.py +++ b/ee/hogai/eval/tests/test_eval_trends_generator.py @@ -1,9 +1,11 @@ +from typing import cast + from langgraph.graph.state import CompiledStateGraph from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName -from posthog.schema import AssistantTrendsQuery, HumanMessage +from ee.hogai.utils.types import AssistantNodeName, AssistantState +from posthog.schema import AssistantTrendsQuery, HumanMessage, VisualizationMessage class TestEvalTrendsGenerator(EvalBaseTest): @@ -14,8 +16,11 @@ def _call_node(self, query: str, plan: str) -> AssistantTrendsQuery: .add_trends_generator(AssistantNodeName.END) 
.compile() ) - state = graph.invoke({"messages": [HumanMessage(content=query)], "plan": plan}) - return state["messages"][-1].answer + state = graph.invoke( + AssistantState(messages=[HumanMessage(content=query)], plan=plan), + self._get_config(), + ) + return cast(VisualizationMessage, AssistantState.model_validate(state).messages[-1]).answer def test_node_replaces_equals_with_contains(self): query = "what is pageview trend for users with name John?" diff --git a/ee/hogai/eval/tests/test_eval_trends_planner.py b/ee/hogai/eval/tests/test_eval_trends_planner.py index e7ea741d03687..d4fbff456a91c 100644 --- a/ee/hogai/eval/tests/test_eval_trends_planner.py +++ b/ee/hogai/eval/tests/test_eval_trends_planner.py @@ -5,7 +5,7 @@ from ee.hogai.assistant import AssistantGraph from ee.hogai.eval.utils import EvalBaseTest -from ee.hogai.utils import AssistantNodeName +from ee.hogai.utils.types import AssistantNodeName, AssistantState from posthog.schema import HumanMessage @@ -40,8 +40,11 @@ def _call_node(self, query): .add_trends_planner(AssistantNodeName.END) .compile() ) - state = graph.invoke({"messages": [HumanMessage(content=query)]}) - return state["plan"] + state = graph.invoke( + AssistantState(messages=[HumanMessage(content=query)]), + self._get_config(), + ) + return AssistantState.model_validate(state).plan or "" def test_no_excessive_property_filters(self): query = "Show the $pageview trend" diff --git a/ee/hogai/eval/utils.py b/ee/hogai/eval/utils.py index 1e50a75daefa2..6e03c4cfafa9f 100644 --- a/ee/hogai/eval/utils.py +++ b/ee/hogai/eval/utils.py @@ -3,15 +3,25 @@ import pytest from django.test import override_settings from flaky import flaky +from langchain_core.runnables import RunnableConfig +from ee.models.assistant import Conversation from posthog.demo.matrix.manager import MatrixManager from posthog.tasks.demo_create_data import HedgeboxMatrix -from posthog.test.base import BaseTest +from posthog.test.base import NonAtomicBaseTest @pytest.mark.skipif(os.environ.get("DEEPEVAL") != "YES", reason="Only runs for the assistant evaluation") @flaky(max_runs=3, min_passes=1) -class EvalBaseTest(BaseTest): +class EvalBaseTest(NonAtomicBaseTest): + def _get_config(self) -> RunnableConfig: + conversation = Conversation.objects.create(team=self.team, user=self.user) + return { + "configurable": { + "thread_id": conversation.id, + } + } + @classmethod def setUpTestData(cls): super().setUpTestData() diff --git a/ee/hogai/funnels/nodes.py b/ee/hogai/funnels/nodes.py index a55bc223847f2..6f71305e0b796 100644 --- a/ee/hogai/funnels/nodes.py +++ b/ee/hogai/funnels/nodes.py @@ -6,12 +6,12 @@ from ee.hogai.schema_generator.nodes import SchemaGeneratorNode, SchemaGeneratorToolsNode from ee.hogai.schema_generator.utils import SchemaGeneratorOutput from ee.hogai.taxonomy_agent.nodes import TaxonomyAgentPlannerNode, TaxonomyAgentPlannerToolsNode -from ee.hogai.utils import AssistantState +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import AssistantFunnelsQuery class FunnelPlannerNode(TaxonomyAgentPlannerNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: toolkit = FunnelsTaxonomyAgentToolkit(self._team) prompt = ChatPromptTemplate.from_messages( [ @@ -23,7 +23,7 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: class FunnelPlannerToolsNode(TaxonomyAgentPlannerToolsNode): - def run(self, state: 
AssistantState, config: RunnableConfig) -> AssistantState:
+    def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState:
         toolkit = FunnelsTaxonomyAgentToolkit(self._team)
         return super()._run_with_toolkit(state, toolkit, config=config)
@@ -36,7 +36,7 @@ class FunnelGeneratorNode(SchemaGeneratorNode[AssistantFunnelsQuery]):
     OUTPUT_MODEL = FunnelsSchemaGeneratorOutput
     OUTPUT_SCHEMA = FUNNEL_SCHEMA

-    def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState:
+    def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState:
         prompt = ChatPromptTemplate.from_messages(
             [
                 ("system", FUNNEL_SYSTEM_PROMPT),
diff --git a/ee/hogai/funnels/prompts.py b/ee/hogai/funnels/prompts.py
index b2deec894a070..3808809c173a7 100644
--- a/ee/hogai/funnels/prompts.py
+++ b/ee/hogai/funnels/prompts.py
@@ -12,6 +12,8 @@

 {{react_format}}

+{{react_human_in_the_loop}}
+
 Below you will find information on how to correctly discover the taxonomy of the user's data.
diff --git a/ee/hogai/funnels/test/test_nodes.py b/ee/hogai/funnels/test/test_nodes.py
index 5c65b14110599..4f4e9fca0e5d4 100644
--- a/ee/hogai/funnels/test/test_nodes.py
+++ b/ee/hogai/funnels/test/test_nodes.py
@@ -4,6 +4,7 @@
 from langchain_core.runnables import RunnableLambda

 from ee.hogai.funnels.nodes import FunnelGeneratorNode, FunnelsSchemaGeneratorOutput
+from ee.hogai.utils.types import AssistantState, PartialAssistantState
 from posthog.schema import (
     AssistantFunnelsQuery,
     HumanMessage,
@@ -15,6 +16,7 @@
 @override_settings(IN_UNIT_TESTING=True)
 class TestFunnelsGeneratorNode(ClickhouseTestMixin, APIBaseTest):
     def setUp(self):
+        super().setUp()
         self.schema = AssistantFunnelsQuery(series=[])

     def test_node_runs(self):
@@ -24,16 +26,13 @@ def test_node_runs(self):
             lambda _: FunnelsSchemaGeneratorOutput(query=self.schema).model_dump()
         )
         new_state = node.run(
-            {
-                "messages": [HumanMessage(content="Text")],
-                "plan": "Plan",
-            },
+            AssistantState(messages=[HumanMessage(content="Text")], plan="Plan"),
             {},
         )
         self.assertEqual(
             new_state,
-            {
-                "messages": [VisualizationMessage(answer=self.schema, plan="Plan", done=True)],
-                "intermediate_steps": None,
-            },
+            PartialAssistantState(
+                messages=[VisualizationMessage(answer=self.schema, plan="Plan", id=new_state.messages[0].id)],
+                intermediate_steps=None,
+            ),
         )
diff --git a/ee/hogai/funnels/toolkit.py b/ee/hogai/funnels/toolkit.py
index 8d6407027aac1..ae603519cc331 100644
--- a/ee/hogai/funnels/toolkit.py
+++ b/ee/hogai/funnels/toolkit.py
@@ -1,5 +1,5 @@
 from ee.hogai.taxonomy_agent.toolkit import TaxonomyAgentToolkit, ToolkitTool
-from ee.hogai.utils import dereference_schema
+from ee.hogai.utils.helpers import dereference_schema
 from posthog.schema import AssistantFunnelsQuery
diff --git a/ee/hogai/graph.py b/ee/hogai/graph.py
index 79e5f914097ce..bf961d6bb9aa8 100644
--- a/ee/hogai/graph.py
+++ b/ee/hogai/graph.py
@@ -1,10 +1,10 @@
 from collections.abc import Hashable
 from typing import Optional, cast

-from langfuse.callback import CallbackHandler
+from langchain_core.runnables.base import RunnableLike
 from langgraph.graph.state import StateGraph

-from ee import settings
+from ee.hogai.django_checkpoint.checkpointer import DjangoCheckpointer
 from ee.hogai.funnels.nodes import (
     FunnelGeneratorNode,
     FunnelGeneratorToolsNode,
@@ -19,15 +19,10 @@
     TrendsPlannerNode,
     TrendsPlannerToolsNode,
 )
-from ee.hogai.utils import AssistantNodeName, AssistantState
+from ee.hogai.utils.types import AssistantNodeName, AssistantState
 from posthog.models.team.team import Team

-if settings.LANGFUSE_PUBLIC_KEY:
-    langfuse_handler = CallbackHandler(
-        public_key=settings.LANGFUSE_PUBLIC_KEY, secret_key=settings.LANGFUSE_SECRET_KEY, host=settings.LANGFUSE_HOST
-    )
-else:
-    langfuse_handler = None
+checkpointer = DjangoCheckpointer()


 class AssistantGraph:
@@ -45,10 +40,14 @@ def add_edge(self, from_node: AssistantNodeName, to_node: AssistantNodeName):
         self._graph.add_edge(from_node, to_node)
         return self

+    def add_node(self, node: AssistantNodeName, action: RunnableLike):
+        self._graph.add_node(node, action)
+        return self
+
     def compile(self):
         if not self._has_start_node:
             raise ValueError("Start node not added to the graph")
-        return self._graph.compile()
+        return self._graph.compile(checkpointer=checkpointer)

     def add_start(self):
         return self.add_edge(AssistantNodeName.START, AssistantNodeName.ROUTER)
diff --git a/ee/hogai/router/nodes.py b/ee/hogai/router/nodes.py
index c9151faaabc29..f6aeacdebbe6b 100644
--- a/ee/hogai/router/nodes.py
+++ b/ee/hogai/router/nodes.py
@@ -1,4 +1,5 @@
 from typing import Literal, cast
+from uuid import uuid4

 from langchain_core.messages import AIMessage as LangchainAIMessage, BaseMessage
 from langchain_core.prompts import ChatPromptTemplate
@@ -11,7 +12,8 @@
     ROUTER_SYSTEM_PROMPT,
     ROUTER_USER_PROMPT,
 )
-from ee.hogai.utils import AssistantState, AssistantNode
+from ee.hogai.utils.nodes import AssistantNode
+from ee.hogai.utils.types import AssistantState, PartialAssistantState
 from posthog.schema import HumanMessage, RouterMessage

 RouteName = Literal["trends", "funnel"]
@@ -22,7 +24,7 @@ class RouterOutput(BaseModel):


 class RouterNode(AssistantNode):
-    def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState:
+    def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState:
         prompt = ChatPromptTemplate.from_messages(
             [
                 ("system", ROUTER_SYSTEM_PROMPT),
@@ -31,10 +33,10 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState:
         ) + self._construct_messages(state)
         chain = prompt | self._model
         output: RouterOutput = chain.invoke({}, config)
-        return {"messages": [RouterMessage(content=output.visualization_type)]}
+        return PartialAssistantState(messages=[RouterMessage(content=output.visualization_type, id=str(uuid4()))])

     def router(self, state: AssistantState) -> RouteName:
-        last_message = state["messages"][-1]
+        last_message = state.messages[-1]
         if isinstance(last_message, RouterMessage):
             return cast(RouteName, last_message.content)
         raise ValueError("Invalid route.")
@@ -47,7 +49,7 @@ def _model(self):

     def _construct_messages(self, state: AssistantState):
         history: list[BaseMessage] = []
-        for message in state["messages"]:
+        for message in state.messages:
             if isinstance(message, HumanMessage):
                 history += ChatPromptTemplate.from_messages(
                     [("user", ROUTER_USER_PROMPT.strip())], template_format="mustache"
diff --git a/ee/hogai/router/test/test_nodes.py b/ee/hogai/router/test/test_nodes.py
index 06014fb0b9f59..53074a381b804 100644
--- a/ee/hogai/router/test/test_nodes.py
+++ b/ee/hogai/router/test/test_nodes.py
@@ -2,11 +2,11 @@
 from unittest.mock import patch

 from django.test import override_settings
-from langchain_core.messages import AIMessage as LangchainAIMessage
-from langchain_core.messages import HumanMessage as LangchainHumanMessage
+from langchain_core.messages import AIMessage as LangchainAIMessage, HumanMessage as LangchainHumanMessage
 from langchain_core.runnables import RunnableLambda

 from ee.hogai.router.nodes import RouterNode, RouterOutput
+from ee.hogai.utils.types import AssistantState, PartialAssistantState
 from posthog.schema import (
     HumanMessage,
     RouterMessage,
@@ -19,7 +19,7 @@ class TestRouterNode(ClickhouseTestMixin, APIBaseTest):
     def test_router(self):
         node = RouterNode(self.team)
-        state: Any = {"messages": [RouterMessage(content="trends")]}
+        state: Any = AssistantState(messages=[RouterMessage(content="trends")])
         self.assertEqual(node.router(state), "trends")

     def test_node_runs(self):
@@ -28,28 +28,36 @@ def test_node_runs(self):
             return_value=RunnableLambda(lambda _: RouterOutput(visualization_type="funnel")),
         ):
             node = RouterNode(self.team)
-            state: Any = {"messages": [HumanMessage(content="generate trends")]}
-            self.assertEqual(node.run(state, {}), {"messages": [RouterMessage(content="funnel")]})
+            state: Any = AssistantState(messages=[HumanMessage(content="generate trends")])
+            next_state = node.run(state, {})
+            self.assertEqual(
+                next_state,
+                PartialAssistantState(messages=[RouterMessage(content="funnel", id=next_state.messages[0].id)]),
+            )

         with patch(
             "ee.hogai.router.nodes.RouterNode._model",
             return_value=RunnableLambda(lambda _: RouterOutput(visualization_type="trends")),
         ):
             node = RouterNode(self.team)
-            state: Any = {"messages": [HumanMessage(content="generate trends")]}
-            self.assertEqual(node.run(state, {}), {"messages": [RouterMessage(content="trends")]})
+            state: Any = AssistantState(messages=[HumanMessage(content="generate trends")])
+            next_state = node.run(state, {})
+            self.assertEqual(
+                next_state,
+                PartialAssistantState(messages=[RouterMessage(content="trends", id=next_state.messages[0].id)]),
+            )

     def test_node_reconstructs_conversation(self):
         node = RouterNode(self.team)
-        state: Any = {"messages": [HumanMessage(content="generate trends")]}
+        state: Any = AssistantState(messages=[HumanMessage(content="generate trends")])
         self.assertEqual(node._construct_messages(state), [LangchainHumanMessage(content="Question: generate trends")])

-        state = {
-            "messages": [
+        state = AssistantState(
+            messages=[
                 HumanMessage(content="generate trends"),
                 RouterMessage(content="trends"),
                 VisualizationMessage(),
             ]
-        }
+        )
         self.assertEqual(
             node._construct_messages(state),
             [LangchainHumanMessage(content="Question: generate trends"), LangchainAIMessage(content="trends")],
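The thread running through this whole PR: nodes no longer return the full assistant state as a dict but a typed `PartialAssistantState`, so each node declares exactly which fields it updates and LangGraph merges the rest. A minimal sketch of how such a pair of pydantic models could be laid out (the real definitions live in `ee/hogai/utils/types.py`, outside this diff; the reducer and `Any`-typed steps here are assumptions for illustration):

```python
import operator
from typing import Annotated, Any, Optional

from pydantic import BaseModel, Field


class AssistantState(BaseModel):
    # Annotated reducer: message updates are appended to the history, not overwritten.
    messages: Annotated[list, operator.add] = Field(default=[])
    # (AgentAction, observation) pairs; typed loosely here to keep the sketch self-contained.
    intermediate_steps: Optional[list[tuple[Any, Optional[str]]]] = None
    plan: Optional[str] = None
    start_id: Optional[str] = None  # ID of the human message that started this generation


class PartialAssistantState(BaseModel):
    # Same fields, all optional: a node only sets what it actually changed.
    messages: Optional[list] = None
    intermediate_steps: Optional[list[tuple[Any, Optional[str]]]] = None
    plan: Optional[str] = None
    start_id: Optional[str] = None
```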
diff --git a/ee/hogai/schema_generator/nodes.py b/ee/hogai/schema_generator/nodes.py
index f2d383d5c1e30..4bed02fd462cc 100644
--- a/ee/hogai/schema_generator/nodes.py
+++ b/ee/hogai/schema_generator/nodes.py
@@ -1,10 +1,16 @@
-import itertools
 import xml.etree.ElementTree as ET
+from collections.abc import Sequence
 from functools import cached_property
 from typing import Generic, Optional, TypeVar
+from uuid import uuid4

 from langchain_core.agents import AgentAction
-from langchain_core.messages import AIMessage as LangchainAssistantMessage, BaseMessage, merge_message_runs
+from langchain_core.messages import (
+    AIMessage as LangchainAssistantMessage,
+    BaseMessage,
+    HumanMessage as LangchainHumanMessage,
+    merge_message_runs,
+)
 from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
 from langchain_core.runnables import RunnableConfig
 from langchain_openai import ChatOpenAI
@@ -23,10 +29,14 @@
     QUESTION_PROMPT,
 )
 from ee.hogai.schema_generator.utils import SchemaGeneratorOutput
-from ee.hogai.utils import AssistantNode, AssistantState, filter_visualization_conversation
+from ee.hogai.utils.helpers import find_last_message_of_type, slice_messages_to_conversation_start
+from ee.hogai.utils.nodes import AssistantNode
+from ee.hogai.utils.types import AssistantMessageUnion, AssistantState, PartialAssistantState
 from posthog.models.group_type_mapping import GroupTypeMapping
 from posthog.schema import (
+    AssistantMessage,
     FailureMessage,
+    HumanMessage,
     VisualizationMessage,
 )
@@ -63,9 +73,10 @@ def _run_with_prompt(
         state: AssistantState,
         prompt: ChatPromptTemplate,
         config: Optional[RunnableConfig] = None,
-    ) -> AssistantState:
-        generated_plan = state.get("plan", "")
-        intermediate_steps = state.get("intermediate_steps") or []
+    ) -> PartialAssistantState:
+        start_id = state.start_id
+        generated_plan = state.plan or ""
+        intermediate_steps = state.intermediate_steps or []
         validation_error_message = intermediate_steps[-1][1] if intermediate_steps else None

         generation_prompt = prompt + self._construct_messages(state, validation_error_message=validation_error_message)
@@ -79,35 +90,36 @@ def _run_with_prompt(
         except PydanticOutputParserException as e:
             # Generation step is expensive. After a second unsuccessful attempt, it's better to send a failure message.
             if len(intermediate_steps) >= 2:
-                return {
-                    "messages": [
+                return PartialAssistantState(
+                    messages=[
                         FailureMessage(
                             content=f"Oops! It looks like I’m having trouble generating this {self.INSIGHT_NAME} insight. Could you please try again?"
                         )
                     ],
-                    "intermediate_steps": None,
-                }
+                    intermediate_steps=None,
+                )

-            return {
-                "intermediate_steps": [
+            return PartialAssistantState(
+                intermediate_steps=[
                     *intermediate_steps,
                     (AgentAction("handle_incorrect_response", e.llm_output, e.validation_message), None),
                 ],
-            }
+            )

-        return {
-            "messages": [
+        return PartialAssistantState(
+            messages=[
                 VisualizationMessage(
                     plan=generated_plan,
                     answer=message.query,
-                    done=True,
+                    initiator=start_id,
+                    id=str(uuid4()),
                 )
             ],
-            "intermediate_steps": None,
-        }
+            intermediate_steps=None,
+        )

     def router(self, state: AssistantState):
-        if state.get("intermediate_steps") is not None:
+        if state.intermediate_steps:
             return "tools"
         return "next"
@@ -123,15 +135,25 @@ def _group_mapping_prompt(self) -> str:
         )
         return ET.tostring(root, encoding="unicode")

+    def _get_human_viz_message_mapping(self, messages: Sequence[AssistantMessageUnion]) -> dict[str, int]:
+        mapping: dict[str, int] = {}
+        for idx, msg in enumerate(messages):
+            if isinstance(msg, VisualizationMessage) and msg.initiator is not None:
+                mapping[msg.initiator] = idx
+        return mapping
+
     def _construct_messages(
         self, state: AssistantState, validation_error_message: Optional[str] = None
     ) -> list[BaseMessage]:
         """
         Reconstruct the conversation for the generation. Take all previously generated questions, plans, and schemas, and return the history.
         """
-        messages = state.get("messages", [])
-        generated_plan = state.get("plan", "")
+        messages = state.messages
+        generated_plan = state.plan
+        start_id = state.start_id
+        if start_id is not None:
+            messages = slice_messages_to_conversation_start(messages, start_id)
         if len(messages) == 0:
             return []
@@ -141,43 +163,61 @@ def _construct_messages(
             )
         ]

-        human_messages, visualization_messages = filter_visualization_conversation(messages)
-        first_ai_message = True
+        msg_mapping = self._get_human_viz_message_mapping(messages)
+        initiator_message = messages[-1]
+        last_viz_message = find_last_message_of_type(messages, VisualizationMessage)
+
+        for message in messages:
+            # The initial human message and the new plan are added to the end of the conversation.
+            if message == initiator_message:
+                continue
+            if isinstance(message, HumanMessage):
+                if message.id and (viz_message_idx := msg_mapping.get(message.id)):
+                    # Plans go first.
+                    viz_message = messages[viz_message_idx]
+                    if isinstance(viz_message, VisualizationMessage):
+                        conversation.append(
+                            HumanMessagePromptTemplate.from_template(PLAN_PROMPT, template_format="mustache").format(
+                                plan=viz_message.plan or ""
+                            )
+                        )

-        for idx, (human_message, ai_message) in enumerate(
-            itertools.zip_longest(human_messages, visualization_messages)
-        ):
-            # Plans go first
-            if ai_message:
-                conversation.append(
-                    HumanMessagePromptTemplate.from_template(
-                        PLAN_PROMPT if first_ai_message else NEW_PLAN_PROMPT,
-                        template_format="mustache",
-                    ).format(plan=ai_message.plan or "")
-                )
-                first_ai_message = False
-            elif generated_plan:
-                conversation.append(
-                    HumanMessagePromptTemplate.from_template(
-                        PLAN_PROMPT if first_ai_message else NEW_PLAN_PROMPT,
-                        template_format="mustache",
-                    ).format(plan=generated_plan)
+                    # Augment previous initiator messages with the question prompt.
+                    conversation.append(
+                        HumanMessagePromptTemplate.from_template(QUESTION_PROMPT, template_format="mustache").format(
+                            question=message.content
+                        )
+                    )
+                # Otherwise, just append the human message.
+                else:
+                    conversation.append(LangchainHumanMessage(content=message.content))
+            # Summaries and human-in-the-loop messages.
+            elif isinstance(message, AssistantMessage):
+                conversation.append(LangchainAssistantMessage(content=message.content))
+
+        # Include only the last generated schema, because it doesn't need more context.
+        if last_viz_message:
+            conversation.append(
+                LangchainAssistantMessage(
+                    content=last_viz_message.answer.model_dump_json() if last_viz_message.answer else ""
                 )
-
-            # Then questions
-            if human_message:
+            )
+        # Add the initiator message and the generated plan to the end, so instructions are clear.
+        if isinstance(initiator_message, HumanMessage):
+            if generated_plan:
+                plan_prompt = PLAN_PROMPT if messages[0] == initiator_message else NEW_PLAN_PROMPT
                 conversation.append(
-                    HumanMessagePromptTemplate.from_template(QUESTION_PROMPT, template_format="mustache").format(
-                        question=human_message.content
+                    HumanMessagePromptTemplate.from_template(plan_prompt, template_format="mustache").format(
+                        plan=generated_plan or ""
                     )
                 )
-
-            # Then schemas, but include only last generated schema because it doesn't need more context.
-            if ai_message and idx + 1 == len(visualization_messages):
-                conversation.append(
-                    LangchainAssistantMessage(content=ai_message.answer.model_dump_json() if ai_message.answer else "")
+            conversation.append(
+                HumanMessagePromptTemplate.from_template(QUESTION_PROMPT, template_format="mustache").format(
+                    question=initiator_message.content
                 )
+            )

+        # Retries must be added to the end of the conversation.
         if validation_error_message:
             conversation.append(
                 HumanMessagePromptTemplate.from_template(FAILOVER_PROMPT, template_format="mustache").format(
@@ -193,10 +233,10 @@ class SchemaGeneratorToolsNode(AssistantNode):

     Used for failover from generation errors.
""" - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: - intermediate_steps = state.get("intermediate_steps", []) + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: + intermediate_steps = state.intermediate_steps or [] if not intermediate_steps: - return state + return PartialAssistantState() action, _ = intermediate_steps[-1] prompt = ( @@ -205,9 +245,9 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: .content ) - return { - "intermediate_steps": [ + return PartialAssistantState( + intermediate_steps=[ *intermediate_steps[:-1], (action, str(prompt)), ] - } + ) diff --git a/ee/hogai/schema_generator/test/test_nodes.py b/ee/hogai/schema_generator/test/test_nodes.py index 795045af50b56..b44154b93b927 100644 --- a/ee/hogai/schema_generator/test/test_nodes.py +++ b/ee/hogai/schema_generator/test/test_nodes.py @@ -4,10 +4,11 @@ from django.test import override_settings from langchain_core.agents import AgentAction from langchain_core.prompts import ChatPromptTemplate -from langchain_core.runnables import RunnableLambda +from langchain_core.runnables import RunnableConfig, RunnableLambda from ee.hogai.schema_generator.nodes import SchemaGeneratorNode, SchemaGeneratorToolsNode from ee.hogai.schema_generator.utils import SchemaGeneratorOutput +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import ( AssistantMessage, AssistantTrendsQuery, @@ -16,7 +17,7 @@ RouterMessage, VisualizationMessage, ) -from posthog.test.base import APIBaseTest, ClickhouseTestMixin +from posthog.test.base import BaseTest TestSchema = SchemaGeneratorOutput[AssistantTrendsQuery] @@ -26,7 +27,7 @@ class DummyGeneratorNode(SchemaGeneratorNode[AssistantTrendsQuery]): OUTPUT_MODEL = SchemaGeneratorOutput[AssistantTrendsQuery] OUTPUT_SCHEMA = {} - def run(self, state, config): + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: prompt = ChatPromptTemplate.from_messages( [ ("system", "system_prompt"), @@ -36,8 +37,9 @@ def run(self, state, config): @override_settings(IN_UNIT_TESTING=True) -class TestSchemaGeneratorNode(ClickhouseTestMixin, APIBaseTest): +class TestSchemaGeneratorNode(BaseTest): def setUp(self): + super().setUp() self.schema = AssistantTrendsQuery(series=[]) def test_node_runs(self): @@ -45,23 +47,23 @@ def test_node_runs(self): with patch.object(DummyGeneratorNode, "_model") as generator_model_mock: generator_model_mock.return_value = RunnableLambda(lambda _: TestSchema(query=self.schema).model_dump()) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "plan": "Plan", - }, + AssistantState( + messages=[HumanMessage(content="Text", id="0")], + plan="Plan", + start_id="0", + ), {}, ) - self.assertEqual( - new_state, - { - "messages": [VisualizationMessage(answer=self.schema, plan="Plan", done=True)], - "intermediate_steps": None, - }, - ) + self.assertIsNone(new_state.intermediate_steps) + self.assertEqual(len(new_state.messages), 1) + self.assertEqual(new_state.messages[0].type, "ai/viz") + self.assertEqual(new_state.messages[0].answer, self.schema) - def test_agent_reconstructs_conversation(self): + def test_agent_reconstructs_conversation_and_does_not_add_an_empty_plan(self): node = DummyGeneratorNode(self.team) - history = node._construct_messages({"messages": [HumanMessage(content="Text")]}) + history = node._construct_messages( + AssistantState(messages=[HumanMessage(content="Text", id="0")], start_id="0") 
diff --git a/ee/hogai/schema_generator/test/test_nodes.py b/ee/hogai/schema_generator/test/test_nodes.py
index 795045af50b56..b44154b93b927 100644
--- a/ee/hogai/schema_generator/test/test_nodes.py
+++ b/ee/hogai/schema_generator/test/test_nodes.py
@@ -4,10 +4,11 @@
 from django.test import override_settings
 from langchain_core.agents import AgentAction
 from langchain_core.prompts import ChatPromptTemplate
-from langchain_core.runnables import RunnableLambda
+from langchain_core.runnables import RunnableConfig, RunnableLambda

 from ee.hogai.schema_generator.nodes import SchemaGeneratorNode, SchemaGeneratorToolsNode
 from ee.hogai.schema_generator.utils import SchemaGeneratorOutput
+from ee.hogai.utils.types import AssistantState, PartialAssistantState
 from posthog.schema import (
     AssistantMessage,
     AssistantTrendsQuery,
@@ -16,7 +17,7 @@
     RouterMessage,
     VisualizationMessage,
 )
-from posthog.test.base import APIBaseTest, ClickhouseTestMixin
+from posthog.test.base import BaseTest

 TestSchema = SchemaGeneratorOutput[AssistantTrendsQuery]
@@ -26,7 +27,7 @@ class DummyGeneratorNode(SchemaGeneratorNode[AssistantTrendsQuery]):
     OUTPUT_MODEL = SchemaGeneratorOutput[AssistantTrendsQuery]
     OUTPUT_SCHEMA = {}

-    def run(self, state, config):
+    def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState:
         prompt = ChatPromptTemplate.from_messages(
             [
                 ("system", "system_prompt"),
@@ -36,8 +37,9 @@
 @override_settings(IN_UNIT_TESTING=True)
-class TestSchemaGeneratorNode(ClickhouseTestMixin, APIBaseTest):
+class TestSchemaGeneratorNode(BaseTest):
     def setUp(self):
+        super().setUp()
         self.schema = AssistantTrendsQuery(series=[])

     def test_node_runs(self):
@@ -45,23 +47,23 @@ def test_node_runs(self):
         with patch.object(DummyGeneratorNode, "_model") as generator_model_mock:
             generator_model_mock.return_value = RunnableLambda(lambda _: TestSchema(query=self.schema).model_dump())
             new_state = node.run(
-                {
-                    "messages": [HumanMessage(content="Text")],
-                    "plan": "Plan",
-                },
+                AssistantState(
+                    messages=[HumanMessage(content="Text", id="0")],
+                    plan="Plan",
+                    start_id="0",
+                ),
                 {},
             )
-            self.assertEqual(
-                new_state,
-                {
-                    "messages": [VisualizationMessage(answer=self.schema, plan="Plan", done=True)],
-                    "intermediate_steps": None,
-                },
-            )
+            self.assertIsNone(new_state.intermediate_steps)
+            self.assertEqual(len(new_state.messages), 1)
+            self.assertEqual(new_state.messages[0].type, "ai/viz")
+            self.assertEqual(new_state.messages[0].answer, self.schema)

-    def test_agent_reconstructs_conversation(self):
+    def test_agent_reconstructs_conversation_and_does_not_add_an_empty_plan(self):
         node = DummyGeneratorNode(self.team)
-        history = node._construct_messages({"messages": [HumanMessage(content="Text")]})
+        history = node._construct_messages(
+            AssistantState(messages=[HumanMessage(content="Text", id="0")], start_id="0")
+        )
         self.assertEqual(len(history), 2)
         self.assertEqual(history[0].type, "human")
         self.assertIn("mapping", history[0].content)
@@ -69,7 +71,11 @@
         self.assertIn("Answer to this question:", history[1].content)
         self.assertNotIn("{{question}}", history[1].content)

-        history = node._construct_messages({"messages": [HumanMessage(content="Text")], "plan": "randomplan"})
+    def test_agent_reconstructs_conversation_adds_plan(self):
+        node = DummyGeneratorNode(self.team)
+        history = node._construct_messages(
+            AssistantState(messages=[HumanMessage(content="Text", id="0")], plan="randomplan", start_id="0")
+        )
         self.assertEqual(len(history), 3)
         self.assertEqual(history[0].type, "human")
         self.assertIn("mapping", history[0].content)
@@ -82,16 +88,18 @@
         self.assertNotIn("{{question}}", history[2].content)
         self.assertIn("Text", history[2].content)

+    def test_agent_reconstructs_conversation_can_handle_follow_ups(self):
         node = DummyGeneratorNode(self.team)
         history = node._construct_messages(
-            {
-                "messages": [
-                    HumanMessage(content="Text"),
-                    VisualizationMessage(answer=self.schema, plan="randomplan"),
-                    HumanMessage(content="Follow Up"),
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Text", id="0"),
+                    VisualizationMessage(answer=self.schema, plan="randomplan", id="1", initiator="0"),
+                    HumanMessage(content="Follow Up", id="2"),
                 ],
-                "plan": "newrandomplan",
-            }
+                plan="newrandomplan",
+                start_id="2",
+            )
         )

         self.assertEqual(len(history), 6)
@@ -116,13 +124,41 @@
         self.assertNotIn("{{question}}", history[5].content)
         self.assertIn("Follow Up", history[5].content)

-    def test_agent_reconstructs_conversation_and_merges_messages(self):
+    def test_agent_reconstructs_conversation_and_does_not_merge_messages(self):
+        node = DummyGeneratorNode(self.team)
+        history = node._construct_messages(
+            AssistantState(
+                messages=[HumanMessage(content="Te", id="0"), HumanMessage(content="xt", id="1")],
+                plan="randomplan",
+                start_id="1",
+            )
+        )
+        self.assertEqual(len(history), 4)
+        self.assertEqual(history[0].type, "human")
+        self.assertIn("mapping", history[0].content)
+        self.assertIn("Te", history[1].content)
+        self.assertEqual(history[2].type, "human")
+        self.assertNotIn("{{plan}}", history[2].content)
+        self.assertIn("randomplan", history[2].content)
+        self.assertEqual(history[3].type, "human")
+        self.assertIn("Answer to this question:", history[3].content)
+        self.assertNotIn("{{question}}", history[3].content)
+        self.assertEqual(history[3].type, "human")
+        self.assertIn("xt", history[3].content)
+
+    def test_filters_out_human_in_the_loop_after_initiator(self):
         node = DummyGeneratorNode(self.team)
         history = node._construct_messages(
-            {
-                "messages": [HumanMessage(content="Te"), HumanMessage(content="xt")],
-                "plan": "randomplan",
-            }
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Text", id="0"),
+                    VisualizationMessage(answer=self.schema, plan="randomplan", initiator="0", id="1"),
+                    HumanMessage(content="Follow", id="2"),
+                    HumanMessage(content="Up", id="3"),
+                ],
+                plan="newrandomplan",
+                start_id="0",
+            )
         )
         self.assertEqual(len(history), 3)
         self.assertEqual(history[0].type, "human")
@@ -134,104 +170,114 @@
         self.assertEqual(history[2].type, "human")
         self.assertIn("Answer to this question:", history[2].content)
         self.assertNotIn("{{question}}", history[2].content)
-        self.assertIn("Te\nxt", history[2].content)
+        self.assertIn("Text", history[2].content)

+    def test_preserves_human_in_the_loop_before_initiator(self):
         node = DummyGeneratorNode(self.team)
         history = node._construct_messages(
-            {
-                "messages": [
-                    HumanMessage(content="Text"),
-                    VisualizationMessage(answer=self.schema, plan="randomplan"),
-                    HumanMessage(content="Follow"),
-                    HumanMessage(content="Up"),
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Question 1", id="0"),
+                    AssistantMessage(content="Loop", id="1"),
+                    HumanMessage(content="Answer", id="2"),
+                    VisualizationMessage(answer=self.schema, plan="randomplan", initiator="0", id="3"),
+                    HumanMessage(content="Question 2", id="4"),
                 ],
-                "plan": "newrandomplan",
-            }
+                plan="newrandomplan",
+                start_id="4",
+            )
         )
-
-        self.assertEqual(len(history), 6)
+        self.assertEqual(len(history), 8)
         self.assertEqual(history[0].type, "human")
         self.assertIn("mapping", history[0].content)
         self.assertEqual(history[1].type, "human")
         self.assertIn("the plan", history[1].content)
         self.assertNotIn("{{plan}}", history[1].content)
         self.assertIn("randomplan", history[1].content)
-        self.assertEqual(history[2].type, "human")
-        self.assertIn("Answer to this question:", history[2].content)
         self.assertNotIn("{{question}}", history[2].content)
-        self.assertIn("Text", history[2].content)
+        self.assertIn("Question 1", history[2].content)
         self.assertEqual(history[3].type, "ai")
-        self.assertEqual(history[3].content, self.schema.model_dump_json())
+        self.assertEqual("Loop", history[3].content)
         self.assertEqual(history[4].type, "human")
-        self.assertIn("the new plan", history[4].content)
-        self.assertNotIn("{{plan}}", history[4].content)
-        self.assertIn("newrandomplan", history[4].content)
-        self.assertEqual(history[5].type, "human")
-        self.assertIn("Answer to this question:", history[5].content)
-        self.assertNotIn("{{question}}", history[5].content)
-        self.assertIn("Follow\nUp", history[5].content)
+        self.assertEqual("Answer", history[4].content)
+        self.assertEqual(history[5].type, "ai")
+        self.assertEqual(history[6].type, "human")
+        self.assertIn("the new plan", history[6].content)
+        self.assertIn("newrandomplan", history[6].content)
+        self.assertEqual(history[7].type, "human")
+        self.assertNotIn("{{question}}", history[7].content)
+        self.assertIn("Question 2", history[7].content)

     def test_agent_reconstructs_typical_conversation(self):
         node = DummyGeneratorNode(self.team)
         history = node._construct_messages(
-            {
-                "messages": [
-                    HumanMessage(content="Question 1"),
-                    RouterMessage(content="trends"),
-                    VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1"),
-                    AssistantMessage(content="Summary 1"),
-                    HumanMessage(content="Question 2"),
-                    RouterMessage(content="funnel"),
-                    VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2"),
-                    AssistantMessage(content="Summary 2"),
-                    HumanMessage(content="Question 3"),
-                    RouterMessage(content="funnel"),
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Question 1", id="0"),
+                    RouterMessage(content="trends", id="1"),
+                    VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1", initiator="0", id="2"),
+                    AssistantMessage(content="Summary 1", id="3"),
+                    HumanMessage(content="Question 2", id="4"),
+                    RouterMessage(content="funnel", id="5"),
+                    VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2", initiator="4", id="6"),
+                    AssistantMessage(content="Summary 2", id="7"),
+                    HumanMessage(content="Question 3", id="8"),
+                    RouterMessage(content="funnel", id="9"),
                 ],
-                "plan": "Plan 3",
-            }
+ plan="Plan 3", + start_id="8", + ) ) - self.assertEqual(len(history), 8) + + self.assertEqual(len(history), 10) self.assertEqual(history[0].type, "human") self.assertIn("mapping", history[0].content) self.assertEqual(history[1].type, "human") self.assertIn("Plan 1", history[1].content) self.assertEqual(history[2].type, "human") self.assertIn("Question 1", history[2].content) - self.assertEqual(history[3].type, "human") - self.assertIn("Plan 2", history[3].content) + self.assertEqual(history[3].type, "ai") + self.assertEqual(history[3].content, "Summary 1") self.assertEqual(history[4].type, "human") - self.assertIn("Question 2", history[4].content) - self.assertEqual(history[5].type, "ai") - self.assertEqual(history[6].type, "human") - self.assertIn("Plan 3", history[6].content) - self.assertEqual(history[7].type, "human") - self.assertIn("Question 3", history[7].content) - - def test_prompt(self): + self.assertIn("Plan 2", history[4].content) + self.assertEqual(history[5].type, "human") + self.assertIn("Question 2", history[5].content) + self.assertEqual(history[6].type, "ai") + self.assertEqual(history[6].content, "Summary 2") + self.assertEqual(history[7].type, "ai") + self.assertEqual(history[8].type, "human") + self.assertIn("Plan 3", history[8].content) + self.assertEqual(history[9].type, "human") + self.assertIn("Question 3", history[9].content) + + def test_prompt_messages_merged(self): node = DummyGeneratorNode(self.team) - state = { - "messages": [ - HumanMessage(content="Question 1"), - RouterMessage(content="trends"), - VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1"), - AssistantMessage(content="Summary 1"), - HumanMessage(content="Question 2"), - RouterMessage(content="funnel"), - VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2"), - AssistantMessage(content="Summary 2"), - HumanMessage(content="Question 3"), - RouterMessage(content="funnel"), + state = AssistantState( + messages=[ + HumanMessage(content="Question 1", id="0"), + RouterMessage(content="trends", id="1"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1", initiator="0", id="2"), + AssistantMessage(content="Summary 1", id="3"), + HumanMessage(content="Question 2", id="4"), + RouterMessage(content="funnel", id="5"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2", initiator="4", id="6"), + AssistantMessage(content="Summary 2", id="7"), + HumanMessage(content="Question 3", id="8"), + RouterMessage(content="funnel", id="9"), ], - "plan": "Plan 3", - } + plan="Plan 3", + start_id="8", + ) with patch.object(DummyGeneratorNode, "_model") as generator_model_mock: def assert_prompt(prompt): - self.assertEqual(len(prompt), 4) + self.assertEqual(len(prompt), 6) self.assertEqual(prompt[0].type, "system") self.assertEqual(prompt[1].type, "human") self.assertEqual(prompt[2].type, "ai") self.assertEqual(prompt[3].type, "human") + self.assertEqual(prompt[4].type, "ai") + self.assertEqual(prompt[5].type, "human") generator_model_mock.return_value = RunnableLambda(assert_prompt) node.run(state, {}) @@ -244,19 +290,17 @@ def test_failover_with_incorrect_schema(self): schema["query"] = [] generator_model_mock.return_value = RunnableLambda(lambda _: json.dumps(schema)) - new_state = node.run({"messages": [HumanMessage(content="Text")]}, {}) - self.assertIn("intermediate_steps", new_state) - self.assertEqual(len(new_state["intermediate_steps"]), 1) + new_state = node.run(AssistantState(messages=[HumanMessage(content="Text")]), 
{}) + self.assertEqual(len(new_state.intermediate_steps), 1) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "intermediate_steps": [(AgentAction(tool="", tool_input="", log="exception"), "exception")], - }, + AssistantState( + messages=[HumanMessage(content="Text")], + intermediate_steps=[(AgentAction(tool="", tool_input="", log="exception"), "exception")], + ), {}, ) - self.assertIn("intermediate_steps", new_state) - self.assertEqual(len(new_state["intermediate_steps"]), 2) + self.assertEqual(len(new_state.intermediate_steps), 2) def test_node_leaves_failover(self): node = DummyGeneratorNode(self.team) @@ -266,25 +310,25 @@ def test_node_leaves_failover(self): return_value=RunnableLambda(lambda _: TestSchema(query=self.schema).model_dump()), ): new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "intermediate_steps": [(AgentAction(tool="", tool_input="", log="exception"), "exception")], - }, + AssistantState( + messages=[HumanMessage(content="Text")], + intermediate_steps=[(AgentAction(tool="", tool_input="", log="exception"), "exception")], + ), {}, ) - self.assertIsNone(new_state["intermediate_steps"]) + self.assertIsNone(new_state.intermediate_steps) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "intermediate_steps": [ + AssistantState( + messages=[HumanMessage(content="Text")], + intermediate_steps=[ (AgentAction(tool="", tool_input="", log="exception"), "exception"), (AgentAction(tool="", tool_input="", log="exception"), "exception"), ], - }, + ), {}, ) - self.assertIsNone(new_state["intermediate_steps"]) + self.assertIsNone(new_state.intermediate_steps) def test_node_leaves_failover_after_second_unsuccessful_attempt(self): node = DummyGeneratorNode(self.team) @@ -295,29 +339,30 @@ def test_node_leaves_failover_after_second_unsuccessful_attempt(self): generator_model_mock.return_value = RunnableLambda(lambda _: json.dumps(schema)) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "intermediate_steps": [ + AssistantState( + messages=[HumanMessage(content="Text")], + intermediate_steps=[ (AgentAction(tool="", tool_input="", log="exception"), "exception"), (AgentAction(tool="", tool_input="", log="exception"), "exception"), ], - }, + ), {}, ) - self.assertIsNone(new_state["intermediate_steps"]) - self.assertEqual(len(new_state["messages"]), 1) - self.assertIsInstance(new_state["messages"][0], FailureMessage) + self.assertIsNone(new_state.intermediate_steps) + self.assertEqual(len(new_state.messages), 1) + self.assertIsInstance(new_state.messages[0], FailureMessage) def test_agent_reconstructs_conversation_with_failover(self): action = AgentAction(tool="fix", tool_input="validation error", log="exception") node = DummyGeneratorNode(self.team) history = node._construct_messages( - { - "messages": [HumanMessage(content="Text")], - "plan": "randomplan", - "intermediate_steps": [(action, "uniqexception")], - }, - "uniqexception", + AssistantState( + messages=[HumanMessage(content="Text", id="0")], + plan="randomplan", + intermediate_steps=[(action, "uniqexception")], + start_id="0", + ), + validation_error_message="uniqexception", ) self.assertEqual(len(history), 4) self.assertEqual(history[0].type, "human") @@ -337,14 +382,14 @@ def test_agent_reconstructs_conversation_with_failover(self): def test_agent_reconstructs_conversation_with_failed_messages(self): node = DummyGeneratorNode(self.team) history = node._construct_messages( - { - "messages": [ + AssistantState( + messages=[ 
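These failover tests pin down the retry contract: a failed generation pushes an `(AgentAction, None)` pair onto `intermediate_steps`, the router diverts to the tools node, and after two unsuccessful attempts the generator bails out with a `FailureMessage`. Condensed, the decision logic being exercised looks roughly like this (a paraphrase of the nodes above, not the production code):

```python
def generation_router(state) -> str:
    # SchemaGeneratorNode.router: pending steps mean the last attempt failed.
    return "tools" if state.intermediate_steps else "next"


def should_give_up(intermediate_steps) -> bool:
    # Generation is expensive: after a second unsuccessful attempt,
    # emit a FailureMessage instead of retrying again.
    return len(intermediate_steps or []) >= 2
```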
HumanMessage(content="Text"), FailureMessage(content="Error"), HumanMessage(content="Text"), ], - "plan": "randomplan", - }, + plan="randomplan", + ), ) self.assertEqual(len(history), 3) self.assertEqual(history[0].type, "human") @@ -360,19 +405,19 @@ def test_agent_reconstructs_conversation_with_failed_messages(self): def test_router(self): node = DummyGeneratorNode(self.team) - state = node.router({"messages": [], "intermediate_steps": None}) + state = node.router(AssistantState(messages=[], intermediate_steps=None)) self.assertEqual(state, "next") state = node.router( - {"messages": [], "intermediate_steps": [(AgentAction(tool="", tool_input="", log=""), None)]} + AssistantState(messages=[], intermediate_steps=[(AgentAction(tool="", tool_input="", log=""), None)]) ) self.assertEqual(state, "tools") -class TestSchemaGeneratorToolsNode(ClickhouseTestMixin, APIBaseTest): +class TestSchemaGeneratorToolsNode(BaseTest): def test_tools_node(self): node = SchemaGeneratorToolsNode(self.team) action = AgentAction(tool="fix", tool_input="validationerror", log="pydanticexception") - state = node.run({"messages": [], "intermediate_steps": [(action, None)]}, {}) - self.assertIsNotNone("validationerror", state["intermediate_steps"][0][1]) - self.assertIn("validationerror", state["intermediate_steps"][0][1]) - self.assertIn("pydanticexception", state["intermediate_steps"][0][1]) + state = node.run(AssistantState(messages=[], intermediate_steps=[(action, None)]), {}) + self.assertIsNotNone("validationerror", state.intermediate_steps[0][1]) + self.assertIn("validationerror", state.intermediate_steps[0][1]) + self.assertIn("pydanticexception", state.intermediate_steps[0][1]) diff --git a/ee/hogai/summarizer/nodes.py b/ee/hogai/summarizer/nodes.py index 8d5e8a406f45e..513246bcc1238 100644 --- a/ee/hogai/summarizer/nodes.py +++ b/ee/hogai/summarizer/nodes.py @@ -1,15 +1,18 @@ import json from time import sleep +from uuid import uuid4 + from django.conf import settings +from django.core.serializers.json import DjangoJSONEncoder from langchain_core.prompts import ChatPromptTemplate from langchain_core.runnables import RunnableConfig from langchain_openai import ChatOpenAI -from django.core.serializers.json import DjangoJSONEncoder from rest_framework.exceptions import APIException from sentry_sdk import capture_exception -from ee.hogai.summarizer.prompts import SUMMARIZER_SYSTEM_PROMPT, SUMMARIZER_INSTRUCTION_PROMPT -from ee.hogai.utils import AssistantNode, AssistantNodeName, AssistantState +from ee.hogai.summarizer.prompts import SUMMARIZER_INSTRUCTION_PROMPT, SUMMARIZER_SYSTEM_PROMPT +from ee.hogai.utils.nodes import AssistantNode +from ee.hogai.utils.types import AssistantNodeName, AssistantState, PartialAssistantState from posthog.api.services.query import process_query_dict from posthog.clickhouse.client.execute_async import get_query_status from posthog.errors import ExposedCHQueryError @@ -21,8 +24,8 @@ class SummarizerNode(AssistantNode): name = AssistantNodeName.SUMMARIZER - def run(self, state: AssistantState, config: RunnableConfig): - viz_message = state["messages"][-1] + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: + viz_message = state.messages[-1] if not isinstance(viz_message, VisualizationMessage): raise ValueError("Can only run summarization with a visualization message as the last one in the state") if viz_message.answer is None: @@ -58,10 +61,16 @@ def run(self, state: AssistantState, config: RunnableConfig): err_message = ", ".join(f"{key}: 
{value}" for key, value in err.detail.items()) elif isinstance(err.detail, list): err_message = ", ".join(map(str, err.detail)) - return {"messages": [FailureMessage(content=f"There was an error running this query: {err_message}")]} + return PartialAssistantState( + messages=[ + FailureMessage(content=f"There was an error running this query: {err_message}", id=str(uuid4())) + ] + ) except Exception as err: capture_exception(err) - return {"messages": [FailureMessage(content="There was an unknown error running this query.")]} + return PartialAssistantState( + messages=[FailureMessage(content="There was an unknown error running this query.", id=str(uuid4()))] + ) summarization_prompt = ChatPromptTemplate(self._construct_messages(state), template_format="mustache") @@ -76,7 +85,7 @@ def run(self, state: AssistantState, config: RunnableConfig): config, ) - return {"messages": [AssistantMessage(content=str(message.content), done=True)]} + return PartialAssistantState(messages=[AssistantMessage(content=str(message.content), id=str(uuid4()))]) @property def _model(self): @@ -85,7 +94,7 @@ def _model(self): def _construct_messages(self, state: AssistantState) -> list[tuple[str, str]]: conversation: list[tuple[str, str]] = [("system", SUMMARIZER_SYSTEM_PROMPT)] - for message in state.get("messages", []): + for message in state.messages: if isinstance(message, HumanMessage): conversation.append(("human", message.content)) elif isinstance(message, AssistantMessage): diff --git a/ee/hogai/summarizer/test/test_nodes.py b/ee/hogai/summarizer/test/test_nodes.py index b38d88275aa19..9c54517717b5f 100644 --- a/ee/hogai/summarizer/test/test_nodes.py +++ b/ee/hogai/summarizer/test/test_nodes.py @@ -1,23 +1,23 @@ from unittest.mock import patch from django.test import override_settings -from langchain_core.runnables import RunnableLambda from langchain_core.messages import ( HumanMessage as LangchainHumanMessage, ) +from langchain_core.runnables import RunnableLambda +from rest_framework.exceptions import ValidationError + from ee.hogai.summarizer.nodes import SummarizerNode from ee.hogai.summarizer.prompts import SUMMARIZER_INSTRUCTION_PROMPT, SUMMARIZER_SYSTEM_PROMPT +from ee.hogai.utils.types import AssistantState +from posthog.api.services.query import process_query_dict from posthog.schema import ( - AssistantMessage, AssistantTrendsEventsNode, AssistantTrendsQuery, - FailureMessage, HumanMessage, VisualizationMessage, ) -from rest_framework.exceptions import ValidationError from posthog.test.base import APIBaseTest, ClickhouseTestMixin -from posthog.api.services.query import process_query_dict @override_settings(IN_UNIT_TESTING=True) @@ -32,28 +32,26 @@ def test_node_runs(self, mock_process_query_dict): lambda _: LangchainHumanMessage(content="The results indicate foobar.") ) new_state = node.run( - { - "messages": [ - HumanMessage(content="Text"), + AssistantState( + messages=[ + HumanMessage(content="Text", id="test"), VisualizationMessage( answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]), plan="Plan", - done=True, + id="test2", + initiator="test", ), ], - "plan": "Plan", - }, + plan="Plan", + start_id="test", + ), {}, ) mock_process_query_dict.assert_called_once() # Query processing started - self.assertEqual( - new_state, - { - "messages": [ - AssistantMessage(content="The results indicate foobar.", done=True), - ], - }, - ) + msg = new_state.messages[0] + self.assertEqual(msg.content, "The results indicate foobar.") + self.assertEqual(msg.type, "ai") + self.assertIsNotNone(msg.id) 
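For context on what `test_node_runs` drives: the summarizer rebuilds the chat as plain `(role, template)` tuples and hands them to `ChatPromptTemplate` with mustache templating, the same construction this diff uses in the taxonomy agent. A stripped-down sketch of that assembly (the prompt texts are placeholders, not the real `SUMMARIZER_*` constants):

```python
from langchain_core.prompts import ChatPromptTemplate

SYSTEM_PROMPT = "You explain query results to the user."      # placeholder
INSTRUCTION_PROMPT = "Summarize these results: {{results}}"   # placeholder


def construct_summarization_prompt(messages) -> ChatPromptTemplate:
    conversation: list[tuple[str, str]] = [("system", SYSTEM_PROMPT)]
    for message in messages:
        if message.type == "human":
            conversation.append(("human", message.content))
        elif message.type == "ai":
            conversation.append(("ai", message.content))
    # The instruction goes last, so the model answers it with the full chat in view.
    conversation.append(("human", INSTRUCTION_PROMPT))
    return ChatPromptTemplate(conversation, template_format="mustache")
```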
     @patch(
         "ee.hogai.summarizer.nodes.process_query_dict",
@@ -66,28 +64,26 @@ def test_node_handles_internal_error(self, mock_process_query_dict):
             lambda _: LangchainHumanMessage(content="The results indicate foobar.")
         )
         new_state = node.run(
-            {
-                "messages": [
-                    HumanMessage(content="Text"),
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Text", id="test"),
                     VisualizationMessage(
                         answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]),
                         plan="Plan",
-                        done=True,
+                        id="test2",
+                        initiator="test",
                     ),
                 ],
-                "plan": "Plan",
-            },
+                plan="Plan",
+                start_id="test",
+            ),
             {},
         )
         mock_process_query_dict.assert_called_once()  # Query processing started
-        self.assertEqual(
-            new_state,
-            {
-                "messages": [
-                    FailureMessage(content="There was an unknown error running this query."),
-                ],
-            },
-        )
+        msg = new_state.messages[0]
+        self.assertEqual(msg.content, "There was an unknown error running this query.")
+        self.assertEqual(msg.type, "ai/failure")
+        self.assertIsNotNone(msg.id)

     @patch(
         "ee.hogai.summarizer.nodes.process_query_dict",
@@ -102,33 +98,29 @@ def test_node_handles_exposed_error(self, mock_process_query_dict):
             lambda _: LangchainHumanMessage(content="The results indicate foobar.")
         )
         new_state = node.run(
-            {
-                "messages": [
-                    HumanMessage(content="Text"),
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Text", id="test"),
                     VisualizationMessage(
                         answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]),
                         plan="Plan",
-                        done=True,
+                        id="test2",
+                        initiator="test",
                     ),
                 ],
-                "plan": "Plan",
-            },
+                plan="Plan",
+                start_id="test",
+            ),
             {},
         )
         mock_process_query_dict.assert_called_once()  # Query processing started
+        msg = new_state.messages[0]
         self.assertEqual(
-            new_state,
-            {
-                "messages": [
-                    FailureMessage(
-                        content=(
-                            "There was an error running this query: This query exceeds the capabilities of our picolator. "
-                            "Try de-brolling its flim-flam."
-                        )
-                    ),
-                ],
-            },
+            msg.content,
+            "There was an error running this query: This query exceeds the capabilities of our picolator. Try de-brolling its flim-flam.",
         )
+        self.assertEqual(msg.type, "ai/failure")
+        self.assertIsNotNone(msg.id)

     def test_node_requires_a_viz_message_in_state(self):
         node = SummarizerNode(self.team)
@@ -137,12 +129,13 @@ def test_node_requires_a_viz_message_in_state(self):
             ValueError, "Can only run summarization with a visualization message as the last one in the state"
         ):
             node.run(
-                {
-                    "messages": [
+                AssistantState(
+                    messages=[
                         HumanMessage(content="Text"),
                     ],
-                    "plan": "Plan",
-                },
+                    plan="Plan",
+                    start_id="test",
+                ),
                 {},
             )

@@ -151,16 +144,13 @@ def test_node_requires_viz_message_in_state_to_have_query(self):

         with self.assertRaisesMessage(ValueError, "Did not found query in the visualization message"):
             node.run(
-                {
-                    "messages": [
-                        VisualizationMessage(
-                            answer=None,
-                            plan="Plan",
-                            done=True,
-                        ),
+                AssistantState(
+                    messages=[
+                        VisualizationMessage(answer=None, plan="Plan", id="test"),
                     ],
-                    "plan": "Plan",
-                },
+                    plan="Plan",
+                    start_id="test",
+                ),
                 {},
             )

@@ -170,16 +160,18 @@ def test_agent_reconstructs_conversation(self):
         node = SummarizerNode(self.team)

         history = node._construct_messages(
-            {
-                "messages": [
-                    HumanMessage(content="What's the trends in signups?"),
+            AssistantState(
+                messages=[
+                    HumanMessage(content="What's the trends in signups?", id="test"),
                     VisualizationMessage(
                         answer=AssistantTrendsQuery(series=[AssistantTrendsEventsNode()]),
                         plan="Plan",
-                        done=True,
+                        id="test2",
+                        initiator="test",
                     ),
-                ]
-            }
+                ],
+                start_id="test",
+            )
         )
         self.assertEqual(
             history,
diff --git a/ee/hogai/taxonomy_agent/nodes.py b/ee/hogai/taxonomy_agent/nodes.py
index 025058a51eec1..bd26a7a93918f 100644
--- a/ee/hogai/taxonomy_agent/nodes.py
+++ b/ee/hogai/taxonomy_agent/nodes.py
@@ -1,4 +1,3 @@
-import itertools
 import xml.etree.ElementTree as ET
 from abc import ABC
 from functools import cached_property
@@ -7,10 +6,16 @@
 from git import Optional
 from langchain.agents.format_scratchpad import format_log_to_str
 from langchain_core.agents import AgentAction
-from langchain_core.messages import AIMessage as LangchainAssistantMessage, BaseMessage, merge_message_runs
+from langchain_core.messages import (
+    AIMessage as LangchainAssistantMessage,
+    BaseMessage,
+    HumanMessage as LangchainHumanMessage,
+    merge_message_runs,
+)
 from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
 from langchain_core.runnables import RunnableConfig
 from langchain_openai import ChatOpenAI
+from langgraph.errors import NodeInterrupt
 from pydantic import ValidationError

 from ee.hogai.taxonomy import CORE_FILTER_DEFINITIONS_BY_GROUP
@@ -24,6 +29,7 @@
     REACT_FOLLOW_UP_PROMPT,
     REACT_FORMAT_PROMPT,
     REACT_FORMAT_REMINDER_PROMPT,
+    REACT_HUMAN_IN_THE_LOOP_PROMPT,
     REACT_MALFORMED_JSON_PROMPT,
     REACT_MISSING_ACTION_CORRECTION_PROMPT,
     REACT_MISSING_ACTION_PROMPT,
@@ -33,13 +39,18 @@
     REACT_USER_PROMPT,
 )
 from ee.hogai.taxonomy_agent.toolkit import TaxonomyAgentTool, TaxonomyAgentToolkit
-from ee.hogai.utils import AssistantNode, AssistantState, filter_visualization_conversation, remove_line_breaks
+from ee.hogai.utils.helpers import filter_messages, remove_line_breaks, slice_messages_to_conversation_start
+from ee.hogai.utils.nodes import AssistantNode
+from ee.hogai.utils.types import AssistantState, PartialAssistantState
 from posthog.hogql_queries.ai.team_taxonomy_query_runner import TeamTaxonomyQueryRunner
 from posthog.hogql_queries.query_runner import ExecutionMode
 from posthog.models.group_type_mapping import GroupTypeMapping
 from posthog.schema import (
+    AssistantMessage,
     CachedTeamTaxonomyQueryResponse,
+    HumanMessage,
     TeamTaxonomyQuery,
+    VisualizationMessage,
 )
@@ -50,8 +61,8 @@ def _run_with_prompt_and_toolkit(
         prompt: ChatPromptTemplate,
         toolkit: TaxonomyAgentToolkit,
         config: Optional[RunnableConfig] = None,
-    ) -> AssistantState:
-        intermediate_steps = state.get("intermediate_steps") or []
+    ) -> PartialAssistantState:
+        intermediate_steps = state.intermediate_steps or []
         conversation = (
             prompt
             + ChatPromptTemplate.from_messages(
@@ -79,6 +90,7 @@
                 "react_format": self._get_react_format_prompt(toolkit),
                 "react_format_reminder": REACT_FORMAT_REMINDER_PROMPT,
                 "react_property_filters": self._get_react_property_filters_prompt(),
+                "react_human_in_the_loop": REACT_HUMAN_IN_THE_LOOP_PROMPT,
                 "product_description": self._team.project.product_description,
                 "groups": self._team_group_types,
                 "events": self._events_prompt,
@@ -108,12 +120,12 @@
                 e.llm_output,
             )

-        return {
-            "intermediate_steps": [*intermediate_steps, (result, None)],
-        }
+        return PartialAssistantState(
+            intermediate_steps=[*intermediate_steps, (result, None)],
+        )

     def router(self, state: AssistantState):
-        if state.get("intermediate_steps", []):
+        if state.intermediate_steps:
             return "tools"
         raise ValueError("Invalid state.")
@@ -188,33 +200,34 @@ def _construct_messages(self, state: AssistantState) -> list[BaseMessage]:
         """
         Reconstruct the conversation for the agent. On this step we only care about previously asked questions and generated plans. All other messages are filtered out.
         """
-        human_messages, visualization_messages = filter_visualization_conversation(state.get("messages", []))
-
-        if not human_messages:
-            return []
-
+        start_id = state.start_id
+        filtered_messages = filter_messages(slice_messages_to_conversation_start(state.messages, start_id))
         conversation = []

-        for idx, messages in enumerate(itertools.zip_longest(human_messages, visualization_messages)):
-            human_message, viz_message = messages
-
-            if human_message:
+        for idx, message in enumerate(filtered_messages):
+            if isinstance(message, HumanMessage):
+                # Add initial instructions.
                 if idx == 0:
                     conversation.append(
                         HumanMessagePromptTemplate.from_template(REACT_USER_PROMPT, template_format="mustache").format(
-                            question=human_message.content
+                            question=message.content
                         )
                     )
-                else:
+                # Add follow-up instructions only for the human message that initiated a generation.
+                elif message.id == start_id:
                     conversation.append(
                         HumanMessagePromptTemplate.from_template(
                             REACT_FOLLOW_UP_PROMPT,
                             template_format="mustache",
-                        ).format(feedback=human_message.content)
+                        ).format(feedback=message.content)
                     )
-
-            if viz_message:
-                conversation.append(LangchainAssistantMessage(content=viz_message.plan or ""))
+                # Leave everything else as is.
+                else:
+                    conversation.append(LangchainHumanMessage(content=message.content))
+            elif isinstance(message, VisualizationMessage):
+                conversation.append(LangchainAssistantMessage(content=message.plan or ""))
+            elif isinstance(message, AssistantMessage):
+                conversation.append(LangchainAssistantMessage(content=message.content))

         return conversation
@@ -230,26 +243,37 @@ class TaxonomyAgentPlannerToolsNode(AssistantNode, ABC):
     def _run_with_toolkit(
         self, state: AssistantState, toolkit: TaxonomyAgentToolkit, config: Optional[RunnableConfig] = None
-    ) -> AssistantState:
-        intermediate_steps = state.get("intermediate_steps") or []
-        action, _ = intermediate_steps[-1]
+    ) -> PartialAssistantState:
+        intermediate_steps = state.intermediate_steps or []
+        action, observation = intermediate_steps[-1]

         try:
             input = TaxonomyAgentTool.model_validate({"name": action.tool, "arguments": action.tool_input}).root
         except ValidationError as e:
-            observation = (
+            observation = str(
                 ChatPromptTemplate.from_template(REACT_PYDANTIC_VALIDATION_EXCEPTION_PROMPT, template_format="mustache")
                 .format_messages(exception=e.errors(include_url=False))[0]
                 .content
             )
-            return {"intermediate_steps": [*intermediate_steps[:-1], (action, str(observation))]}
+            return PartialAssistantState(
+                intermediate_steps=[*intermediate_steps[:-1], (action, str(observation))],
+            )

         # The plan has been found. Move to the generation.
         if input.name == "final_answer":
-            return {
-                "plan": input.arguments,
-                "intermediate_steps": None,
-            }
+            return PartialAssistantState(
+                plan=input.arguments,
+                intermediate_steps=[],
+            )
+        if input.name == "ask_user_for_help":
+            # The agent has requested help, so we interrupt the graph.
+            if not observation:
+                raise NodeInterrupt(input.arguments)
+
+            # Feedback was provided.
+            return PartialAssistantState(
+                intermediate_steps=[*intermediate_steps[:-1], (action, observation)],
+            )

         output = ""
         if input.name == "retrieve_event_properties":
@@ -263,9 +287,11 @@
         else:
             output = toolkit.handle_incorrect_response(input.arguments)

-        return {"intermediate_steps": [*intermediate_steps[:-1], (action, output)]}
+        return PartialAssistantState(
+            intermediate_steps=[*intermediate_steps[:-1], (action, output)],
+        )

     def router(self, state: AssistantState):
-        if state.get("plan") is not None:
+        if state.plan is not None:
             return "plan_found"
         return "continue"
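The `ask_user_for_help` branch above is the human-in-the-loop core: with no recorded observation, the tools node raises `NodeInterrupt`, and because `AssistantGraph.compile()` now passes a checkpointer, the run halts mid-graph and can be resumed on the same `thread_id` once the user replies. A self-contained toy version of that pause/resume cycle (using LangGraph's in-memory checkpointer rather than this PR's `DjangoCheckpointer`; the resume mechanics are the standard LangGraph flow, not PostHog-specific code):

```python
from langgraph.checkpoint.memory import MemorySaver
from langgraph.errors import NodeInterrupt
from langgraph.graph import END, START, StateGraph
from typing_extensions import TypedDict


class State(TypedDict):
    question: str
    feedback: str


def planner(state: State) -> dict:
    if not state.get("feedback"):
        # No answer from the user yet: halt the run at this node.
        raise NodeInterrupt("Need help with this query")
    return {}


builder = StateGraph(State)
builder.add_node("planner", planner)
builder.add_edge(START, "planner")
builder.add_edge("planner", END)
graph = builder.compile(checkpointer=MemorySaver())

config = {"configurable": {"thread_id": "1"}}
graph.invoke({"question": "Show conversions", "feedback": ""}, config)  # pauses
assert graph.get_state(config).next  # non-empty => waiting for the user
graph.update_state(config, {"feedback": "Weekly, please"})              # record the reply
graph.invoke(None, config)                                              # resume past the interrupt
```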
""".strip() diff --git a/ee/hogai/taxonomy_agent/test/test_nodes.py b/ee/hogai/taxonomy_agent/test/test_nodes.py index 40127c19370b6..cb25331664331 100644 --- a/ee/hogai/taxonomy_agent/test/test_nodes.py +++ b/ee/hogai/taxonomy_agent/test/test_nodes.py @@ -11,7 +11,7 @@ TaxonomyAgentPlannerToolsNode, ) from ee.hogai.taxonomy_agent.toolkit import TaxonomyAgentToolkit, ToolkitTool -from ee.hogai.utils import AssistantState +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.models import GroupTypeMapping from posthog.schema import ( AssistantMessage, @@ -37,7 +37,7 @@ def setUp(self): def _get_node(self): class Node(TaxonomyAgentPlannerNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: prompt: ChatPromptTemplate = ChatPromptTemplate.from_messages([("user", "test")]) toolkit = DummyToolkit(self._team) return super()._run_with_prompt_and_toolkit(state, prompt, toolkit, config=config) @@ -46,19 +46,20 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: def test_agent_reconstructs_conversation(self): node = self._get_node() - history = node._construct_messages({"messages": [HumanMessage(content="Text")]}) + history = node._construct_messages(AssistantState(messages=[HumanMessage(content="Text")])) self.assertEqual(len(history), 1) self.assertEqual(history[0].type, "human") self.assertIn("Text", history[0].content) self.assertNotIn(f"{{question}}", history[0].content) history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Text"), - VisualizationMessage(answer=self.schema, plan="randomplan"), - ] - } + AssistantState( + messages=[ + HumanMessage(content="Text", id="0"), + VisualizationMessage(answer=self.schema, plan="randomplan", id="1", initiator="0"), + ], + start_id="1", + ) ) self.assertEqual(len(history), 2) self.assertEqual(history[0].type, "human") @@ -68,13 +69,14 @@ def test_agent_reconstructs_conversation(self): self.assertEqual(history[1].content, "randomplan") history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Text"), - VisualizationMessage(answer=self.schema, plan="randomplan"), - HumanMessage(content="Text"), - ] - } + AssistantState( + messages=[ + HumanMessage(content="Text", id="0"), + VisualizationMessage(answer=self.schema, plan="randomplan", id="1", initiator="0"), + HumanMessage(content="Text", id="2"), + ], + start_id="2", + ) ) self.assertEqual(len(history), 3) self.assertEqual(history[0].type, "human") @@ -89,12 +91,14 @@ def test_agent_reconstructs_conversation(self): def test_agent_reconstructs_conversation_and_omits_unknown_messages(self): node = self._get_node() history = node._construct_messages( - { - "messages": [ - HumanMessage(content="Text"), - AssistantMessage(content="test"), - ] - } + AssistantState( + messages=[ + HumanMessage(content="Text", id="0"), + RouterMessage(content="trends", id="1"), + AssistantMessage(content="test", id="2"), + ], + start_id="0", + ) ) self.assertEqual(len(history), 1) self.assertEqual(history[0].type, "human") @@ -104,13 +108,13 @@ def test_agent_reconstructs_conversation_and_omits_unknown_messages(self): def test_agent_reconstructs_conversation_with_failures(self): node = self._get_node() history = node._construct_messages( - { - "messages": [ + AssistantState( + messages=[ HumanMessage(content="Text"), FailureMessage(content="Error"), HumanMessage(content="Text"), - ] - } + ], + ) ) 
diff --git a/ee/hogai/taxonomy_agent/test/test_nodes.py b/ee/hogai/taxonomy_agent/test/test_nodes.py
index 40127c19370b6..cb25331664331 100644
--- a/ee/hogai/taxonomy_agent/test/test_nodes.py
+++ b/ee/hogai/taxonomy_agent/test/test_nodes.py
@@ -11,7 +11,7 @@
     TaxonomyAgentPlannerToolsNode,
 )
 from ee.hogai.taxonomy_agent.toolkit import TaxonomyAgentToolkit, ToolkitTool
-from ee.hogai.utils import AssistantState
+from ee.hogai.utils.types import AssistantState, PartialAssistantState
 from posthog.models import GroupTypeMapping
 from posthog.schema import (
     AssistantMessage,
@@ -37,7 +37,7 @@ def setUp(self):

     def _get_node(self):
         class Node(TaxonomyAgentPlannerNode):
-            def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState:
+            def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState:
                 prompt: ChatPromptTemplate = ChatPromptTemplate.from_messages([("user", "test")])
                 toolkit = DummyToolkit(self._team)
                 return super()._run_with_prompt_and_toolkit(state, prompt, toolkit, config=config)
@@ -46,19 +46,20 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState:

     def test_agent_reconstructs_conversation(self):
         node = self._get_node()
-        history = node._construct_messages({"messages": [HumanMessage(content="Text")]})
+        history = node._construct_messages(AssistantState(messages=[HumanMessage(content="Text")]))
         self.assertEqual(len(history), 1)
         self.assertEqual(history[0].type, "human")
         self.assertIn("Text", history[0].content)
         self.assertNotIn(f"{{question}}", history[0].content)

         history = node._construct_messages(
-            {
-                "messages": [
-                    HumanMessage(content="Text"),
-                    VisualizationMessage(answer=self.schema, plan="randomplan"),
-                ]
-            }
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Text", id="0"),
+                    VisualizationMessage(answer=self.schema, plan="randomplan", id="1", initiator="0"),
+                ],
+                start_id="1",
+            )
         )
         self.assertEqual(len(history), 2)
         self.assertEqual(history[0].type, "human")
@@ -68,13 +69,14 @@
         self.assertEqual(history[1].content, "randomplan")

         history = node._construct_messages(
-            {
-                "messages": [
-                    HumanMessage(content="Text"),
-                    VisualizationMessage(answer=self.schema, plan="randomplan"),
-                    HumanMessage(content="Text"),
-                ]
-            }
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Text", id="0"),
+                    VisualizationMessage(answer=self.schema, plan="randomplan", id="1", initiator="0"),
+                    HumanMessage(content="Text", id="2"),
+                ],
+                start_id="2",
+            )
         )
         self.assertEqual(len(history), 3)
         self.assertEqual(history[0].type, "human")
@@ -89,12 +91,14 @@ def test_agent_reconstructs_conversation_and_omits_unknown_messages(self):
         node = self._get_node()
         history = node._construct_messages(
-            {
-                "messages": [
-                    HumanMessage(content="Text"),
-                    AssistantMessage(content="test"),
-                ]
-            }
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Text", id="0"),
+                    RouterMessage(content="trends", id="1"),
+                    AssistantMessage(content="test", id="2"),
+                ],
+                start_id="0",
+            )
         )
         self.assertEqual(len(history), 1)
         self.assertEqual(history[0].type, "human")
@@ -104,13 +108,13 @@ def test_agent_reconstructs_conversation_with_failures(self):
         node = self._get_node()
         history = node._construct_messages(
-            {
-                "messages": [
+            AssistantState(
+                messages=[
                     HumanMessage(content="Text"),
                     FailureMessage(content="Error"),
                     HumanMessage(content="Text"),
-                ]
-            }
+                ],
+            )
         )
         self.assertEqual(len(history), 1)
         self.assertEqual(history[0].type, "human")
@@ -120,32 +124,60 @@ def test_agent_reconstructs_typical_conversation(self):
         node = self._get_node()
         history = node._construct_messages(
-            {
-                "messages": [
-                    HumanMessage(content="Question 1"),
-                    RouterMessage(content="trends"),
-                    VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1"),
-                    AssistantMessage(content="Summary 1"),
-                    HumanMessage(content="Question 2"),
-                    RouterMessage(content="funnel"),
-                    VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2"),
-                    AssistantMessage(content="Summary 2"),
-                    HumanMessage(content="Question 3"),
-                    RouterMessage(content="funnel"),
-                ]
-            }
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Question 1", id="0"),
+                    RouterMessage(content="trends", id="1"),
+                    VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1", id="2", initiator="0"),
+                    AssistantMessage(content="Summary 1", id="3"),
+                    HumanMessage(content="Question 2", id="4"),
+                    RouterMessage(content="funnel", id="5"),
+                    AssistantMessage(content="Loop 1", id="6"),
+                    HumanMessage(content="Loop Answer 1", id="7"),
+                    VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2", id="8", initiator="4"),
+                    AssistantMessage(content="Summary 2", id="9"),
+                    HumanMessage(content="Question 3", id="10"),
+                    RouterMessage(content="funnel", id="11"),
+                ],
+                start_id="10",
+            )
         )
-        self.assertEqual(len(history), 5)
+        self.assertEqual(len(history), 9)
         self.assertEqual(history[0].type, "human")
         self.assertIn("Question 1", history[0].content)
         self.assertEqual(history[1].type, "ai")
         self.assertEqual(history[1].content, "Plan 1")
-        self.assertEqual(history[2].type, "human")
-        self.assertIn("Question 2", history[2].content)
-        self.assertEqual(history[3].type, "ai")
-        self.assertEqual(history[3].content, "Plan 2")
-        self.assertEqual(history[4].type, "human")
-        self.assertIn("Question 3", history[4].content)
+        self.assertEqual(history[2].type, "ai")
+        self.assertEqual(history[2].content, "Summary 1")
+        self.assertEqual(history[3].type, "human")
+        self.assertIn("Question 2", history[3].content)
+        self.assertEqual(history[4].type, "ai")
+        self.assertEqual(history[4].content, "Loop 1")
+        self.assertEqual(history[5].type, "human")
+        self.assertEqual(history[5].content, "Loop Answer 1")
+        self.assertEqual(history[6].content, "Plan 2")
+        self.assertEqual(history[6].type, "ai")
+        self.assertEqual(history[7].type, "ai")
+        self.assertEqual(history[7].content, "Summary 2")
+        self.assertEqual(history[8].type, "human")
+        self.assertIn("Question 3", history[8].content)
+
+    def test_agent_reconstructs_conversation_without_messages_after_parent(self):
+        node = self._get_node()
+        history = node._construct_messages(
+            AssistantState(
+                messages=[
+                    HumanMessage(content="Question 1", id="0"),
+                    RouterMessage(content="trends", id="1"),
+                    AssistantMessage(content="Loop 1", id="2"),
+                    HumanMessage(content="Loop Answer 1", id="3"),
+                ],
+                start_id="0",
+            )
+        )
+        self.assertEqual(len(history), 1)
+        self.assertEqual(history[0].type, "human")
+        self.assertIn("Question 1", history[0].content)

     def test_agent_filters_out_low_count_events(self):
         _create_person(distinct_ids=["test"], team=self.team)
@@ -182,9 +214,9 @@ def test_agent_handles_output_without_action_block(self):
             return_value=RunnableLambda(lambda _: LangchainAIMessage(content="I don't want to output an action.")),
         ):
             node = self._get_node()
-            state_update = node.run({"messages": [HumanMessage(content="Question")]}, {})
-            self.assertEqual(len(state_update["intermediate_steps"]), 1)
-            action, obs = state_update["intermediate_steps"][0]
+            state_update = node.run(AssistantState(messages=[HumanMessage(content="Question")]), {})
+            self.assertEqual(len(state_update.intermediate_steps), 1)
+            action, obs = state_update.intermediate_steps[0]
             self.assertIsNone(obs)
             self.assertIn("I don't want to output an action.", action.log)
             self.assertIn("Action:", action.log)
@@ -196,9 +228,9 @@ def test_agent_handles_output_with_malformed_json(self):
             return_value=RunnableLambda(lambda _: LangchainAIMessage(content="Thought.\nAction: abc")),
         ):
             node = self._get_node()
-            state_update = node.run({"messages": [HumanMessage(content="Question")]}, {})
-            self.assertEqual(len(state_update["intermediate_steps"]), 1)
-            action, obs = state_update["intermediate_steps"][0]
+            state_update = node.run(AssistantState(messages=[HumanMessage(content="Question")]), {})
+            self.assertEqual(len(state_update.intermediate_steps), 1)
+            action, obs = state_update.intermediate_steps[0]
             self.assertIsNone(obs)
             self.assertIn("Thought.\nAction: abc", action.log)
             self.assertIn("action", action.tool_input)
@@ -232,34 +264,34 @@ class TestTaxonomyAgentPlannerToolsNode(ClickhouseTestMixin, APIBaseTest):
     def _get_node(self):
         class Node(TaxonomyAgentPlannerToolsNode):
-            def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState:
+            def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState:
                 toolkit = DummyToolkit(self._team)
                 return super()._run_with_toolkit(state, toolkit, config=config)

         return Node(self.team)

     def test_node_handles_action_name_validation_error(self):
-        state = {
-            "intermediate_steps": [(AgentAction(tool="does not exist", tool_input="input", log="log"), "test")],
-            "messages": [],
-        }
+        state = AssistantState(
+            intermediate_steps=[(AgentAction(tool="does not exist", tool_input="input", log="log"), "test")],
+            messages=[],
+        )
         node = self._get_node()
         state_update = node.run(state, {})
-        self.assertEqual(len(state_update["intermediate_steps"]), 1)
-        action, observation = state_update["intermediate_steps"][0]
+        self.assertEqual(len(state_update.intermediate_steps), 1)
+        action, observation = state_update.intermediate_steps[0]
         self.assertIsNotNone(observation)
         self.assertIn("", observation)

     def test_node_handles_action_input_validation_error(self):
-        state = {
-            "intermediate_steps": [
+        state = AssistantState(
+            intermediate_steps=[
                 (AgentAction(tool="retrieve_entity_property_values", tool_input="input", log="log"), "test")
             ],
-            "messages": [],
-        }
+            messages=[],
+        )
         node = self._get_node()
         state_update = node.run(state, {})
-        self.assertEqual(len(state_update["intermediate_steps"]), 1)
-        action, observation = state_update["intermediate_steps"][0]
+        self.assertEqual(len(state_update.intermediate_steps), 1)
+        action, observation = state_update.intermediate_steps[0]
         self.assertIsNotNone(observation)
         self.assertIn("", observation)
diff --git a/ee/hogai/taxonomy_agent/toolkit.py b/ee/hogai/taxonomy_agent/toolkit.py
index dc8a0e092c2e6..d05b6f0c933ef 100644
--- a/ee/hogai/taxonomy_agent/toolkit.py
+++ b/ee/hogai/taxonomy_agent/toolkit.py
@@ -55,6 +55,7 @@ class SingleArgumentTaxonomyAgentTool(BaseModel):
         "retrieve_event_properties",
         "final_answer",
         "handle_incorrect_response",
+        "ask_user_for_help",
     ]
     arguments: str
@@ -145,6 +146,16 @@ def _default_tools(self) -> list[ToolkitTool]:
                     property_name: The name of the property that you want to retrieve values for.
                 """,
             },
+            {
+                "name": "ask_user_for_help",
+                "signature": "(question: str)",
+                "description": """
+                    Use this tool to ask a question to the user. Your question must be concise and clear.
+
+                    Args:
+                        question: The question you want to ask.
+                """,
+            },
         ]

     def render_text_description(self) -> str:
diff --git a/ee/hogai/test/test_assistant.py b/ee/hogai/test/test_assistant.py
index b6cd65bd4ec12..6d0bb8807d629 100644
--- a/ee/hogai/test/test_assistant.py
+++ b/ee/hogai/test/test_assistant.py
@@ -1,31 +1,63 @@
 import json
-from typing import Any
+from typing import Any, Optional, cast
 from unittest.mock import patch
-from uuid import uuid4

-from ee.hogai.utils import Conversation
-from posthog.schema import AssistantMessage, HumanMessage
-from ..assistant import Assistant
+from langchain_core import messages
+from langchain_core.agents import AgentAction
+from langchain_core.runnables import RunnableConfig, RunnableLambda
 from langgraph.graph.state import CompiledStateGraph
+from langgraph.types import StateSnapshot
+from pydantic import BaseModel
+
+from ee.models.assistant import Conversation
+from posthog.schema import AssistantMessage, HumanMessage, ReasoningMessage
+from posthog.test.base import NonAtomicBaseTest
+
+from ..assistant import Assistant
 from ..graph import AssistantGraph, AssistantNodeName
-from posthog.test.base import BaseTest
-from langchain_core.agents import AgentAction


-class TestAssistant(BaseTest):
-    def _run_assistant_graph(self, test_graph: CompiledStateGraph) -> list[tuple[str, Any]]:
+class TestAssistant(NonAtomicBaseTest):
+    CLASS_DATA_LEVEL_SETUP = False
+
+    def setUp(self):
+        super().setUp()
+        self.conversation = Conversation.objects.create(team=self.team, user=self.user)
+
+    def _run_assistant_graph(
+        self,
+        test_graph: Optional[CompiledStateGraph] = None,
+        message: Optional[str] = "Hello",
+        conversation: Optional[Conversation] = None,
+        is_new_conversation: bool = False,
+    ) -> list[tuple[str, Any]]:
         # Create assistant instance with our test graph
         assistant = Assistant(
-            team=self.team,
-            conversation=Conversation(messages=[HumanMessage(content="Hello")], session_id=str(uuid4())),
+            self.team,
+            conversation or self.conversation,
+            HumanMessage(content=message),
+            self.user,
+            is_new_conversation=is_new_conversation,
         )
-        assistant._graph = test_graph
+        if test_graph:
+            assistant._graph = test_graph
         # Capture and parse output of assistant.stream()
         output: list[tuple[str, Any]] = []
         for message in assistant.stream():
-            event_line, data_line, *_ = message.split("\n")
+            event_line, data_line, *_ = cast(str, message).split("\n")
             output.append((event_line.removeprefix("event: "), json.loads(data_line.removeprefix("data: "))))
         return output

+    def assertConversationEqual(self, output: list[tuple[str, Any]], expected_output: list[tuple[str, Any]]):
+        for i, ((output_msg_type, output_msg), (expected_msg_type, expected_msg)) in enumerate(
+            zip(output, expected_output)
+        ):
+            self.assertEqual(output_msg_type, expected_msg_type, f"Message type mismatch at index {i}")
+            msg_dict = (
+                expected_msg.model_dump(exclude_none=True) if isinstance(expected_msg, BaseModel) else expected_msg
+            )
+            self.assertDictContainsSubset(msg_dict, output_msg, f"Message content mismatch at index {i}")
+
     @patch(
         "ee.hogai.trends.nodes.TrendsPlannerNode.run",
         return_value={"intermediate_steps": [(AgentAction(tool="final_answer", tool_input="", log=""), None)]},
@@ -39,19 +71,22 @@ def test_reasoning_messages_added(self, _mock_summarizer_run, _mock_funnel_plann
             .add_edge(AssistantNodeName.START, AssistantNodeName.TRENDS_PLANNER)
             .add_trends_planner(AssistantNodeName.SUMMARIZER)
             .add_summarizer(AssistantNodeName.END)
-            .compile()
+            .compile(),
+            conversation=self.conversation,
         )

         # Assert that ReasoningMessages are added
-        assert output == [
-            ("status", {"type": "ack"}),
+        expected_output = [
+            (
+                "message",
+                HumanMessage(content="Hello").model_dump(exclude_none=True),
+            ),
             (
                 "message",
                 {
                     "type": "ai/reasoning",
                     "content": "Picking relevant events and properties",  # For TrendsPlannerNode
                     "substeps": [],
-                    "done": True,
                 },
             ),
             (
@@ -60,7 +95,6 @@ def test_reasoning_messages_added(self, _mock_summarizer_run, _mock_funnel_plann
                     "type": "ai/reasoning",
                     "content": "Picking relevant events and properties",  # For TrendsPlannerToolsNode
                     "substeps": [],
-                    "done": True,
                 },
             ),
             (
@@ -71,6 +105,7 @@ def test_reasoning_messages_added(self, _mock_summarizer_run, _mock_funnel_plann
                 },
             ),
         ]
+        self.assertConversationEqual(output, expected_output)

     @patch(
         "ee.hogai.trends.nodes.TrendsPlannerNode.run",
@@ -105,19 +140,22 @@ def test_reasoning_messages_with_substeps_added(self, _mock_funnel_planner_run):
             AssistantGraph(self.team)
             .add_edge(AssistantNodeName.START, AssistantNodeName.TRENDS_PLANNER)
             .add_trends_planner(AssistantNodeName.END)
-            .compile()
+            .compile(),
+            conversation=self.conversation,
         )

         # Assert that ReasoningMessages are added
-        assert output == [
-            ("status", {"type": "ack"}),
+        expected_output = [
+            (
+                "message",
+                HumanMessage(content="Hello").model_dump(exclude_none=True),
+            ),
             (
                 "message",
                 {
                     "type": "ai/reasoning",
                     "content": "Picking relevant events and properties",  # For TrendsPlannerNode
                     "substeps": [],
-                    "done": True,
                 },
             ),
             (
@@ -131,7 +169,153 @@ def test_reasoning_messages_with_substeps_added(self, _mock_funnel_planner_run):
                         "Analyzing `currency` event's property `purchase`",
                         "Analyzing person property `country_of_birth`",
                     ],
-                    "done": True,
                 },
             ),
         ]
+        self.assertConversationEqual(output, expected_output)
+
+    def _test_human_in_the_loop(self, graph: CompiledStateGraph):
+        with patch("ee.hogai.taxonomy_agent.nodes.TaxonomyAgentPlannerNode._model") as mock:
+            config: RunnableConfig = {
+                "configurable": {
+                    "thread_id": self.conversation.id,
+                }
+            }
+
+            # Interrupt the graph
+            message = """
+            Thought: Let's ask for help.
+            Action:
+            ```
+            {
+                "action": "ask_user_for_help",
+                "action_input": "Need help with this query"
+            }
+            ```
+            """
+            mock.return_value = RunnableLambda(lambda _: messages.AIMessage(content=message))
+            output = self._run_assistant_graph(graph, conversation=self.conversation)
+            expected_output = [
+                ("message", HumanMessage(content="Hello")),
+                ("message", ReasoningMessage(content="Picking relevant events and properties", substeps=[])),
+                ("message", ReasoningMessage(content="Picking relevant events and properties", substeps=[])),
+                ("message", AssistantMessage(content="Need help with this query")),
+            ]
+            self.assertConversationEqual(output, expected_output)
+            snapshot: StateSnapshot = graph.get_state(config)
+            self.assertTrue(snapshot.next)
+            self.assertIn("intermediate_steps", snapshot.values)
+
+            # Resume the graph from the interruption point.
+            message = """
+            Thought: Finish.
+ Action: + ``` + { + "action": "final_answer", + "action_input": "Plan" + } + ``` + """ + mock.return_value = RunnableLambda(lambda _: messages.AIMessage(content=message)) + output = self._run_assistant_graph(graph, conversation=self.conversation, message="It's straightforward") + expected_output = [ + ("message", HumanMessage(content="It's straightforward")), + ("message", ReasoningMessage(content="Picking relevant events and properties", substeps=[])), + ("message", ReasoningMessage(content="Picking relevant events and properties", substeps=[])), + ] + self.assertConversationEqual(output, expected_output) + snapshot: StateSnapshot = graph.get_state(config) + self.assertFalse(snapshot.next) + self.assertEqual(snapshot.values.get("intermediate_steps"), []) + self.assertEqual(snapshot.values["plan"], "Plan") + + def test_trends_interrupt_when_asking_for_help(self): + graph = ( + AssistantGraph(self.team) + .add_edge(AssistantNodeName.START, AssistantNodeName.TRENDS_PLANNER) + .add_trends_planner(AssistantNodeName.END) + .compile() + ) + self._test_human_in_the_loop(graph) + + def test_funnels_interrupt_when_asking_for_help(self): + graph = ( + AssistantGraph(self.team) + .add_edge(AssistantNodeName.START, AssistantNodeName.FUNNEL_PLANNER) + .add_funnel_planner(AssistantNodeName.END) + .compile() + ) + self._test_human_in_the_loop(graph) + + def test_intermediate_steps_are_updated_after_feedback(self): + with patch("ee.hogai.taxonomy_agent.nodes.TaxonomyAgentPlannerNode._model") as mock: + graph = ( + AssistantGraph(self.team) + .add_edge(AssistantNodeName.START, AssistantNodeName.TRENDS_PLANNER) + .add_trends_planner(AssistantNodeName.END) + .compile() + ) + config: RunnableConfig = { + "configurable": { + "thread_id": self.conversation.id, + } + } + + # Interrupt the graph + message = """ + Thought: Let's ask for help. 
+ Action: + ``` + { + "action": "ask_user_for_help", + "action_input": "Need help with this query" + } + ``` + """ + mock.return_value = RunnableLambda(lambda _: messages.AIMessage(content=message)) + self._run_assistant_graph(graph, conversation=self.conversation) + snapshot: StateSnapshot = graph.get_state(config) + self.assertTrue(snapshot.next) + self.assertIn("intermediate_steps", snapshot.values) + self.assertEqual(len(snapshot.values["intermediate_steps"]), 1) + action, observation = snapshot.values["intermediate_steps"][0] + self.assertEqual(action.tool, "ask_user_for_help") + self.assertIsNone(observation) + + self._run_assistant_graph(graph, conversation=self.conversation, message="It's straightforward") + snapshot: StateSnapshot = graph.get_state(config) + self.assertTrue(snapshot.next) + self.assertIn("intermediate_steps", snapshot.values) + self.assertEqual(len(snapshot.values["intermediate_steps"]), 2) + action, observation = snapshot.values["intermediate_steps"][0] + self.assertEqual(action.tool, "ask_user_for_help") + self.assertEqual(observation, "It's straightforward") + action, observation = snapshot.values["intermediate_steps"][1] + self.assertEqual(action.tool, "ask_user_for_help") + self.assertIsNone(observation) + + def test_new_conversation_handles_serialized_conversation(self): + graph = ( + AssistantGraph(self.team) + .add_node(AssistantNodeName.ROUTER, lambda _: {"messages": [AssistantMessage(content="Hello")]}) + .add_edge(AssistantNodeName.START, AssistantNodeName.ROUTER) + .add_edge(AssistantNodeName.ROUTER, AssistantNodeName.END) + .compile() + ) + output = self._run_assistant_graph( + graph, + conversation=self.conversation, + is_new_conversation=True, + ) + expected_output = [ + ("conversation", {"id": str(self.conversation.id)}), + ] + self.assertConversationEqual(output[:1], expected_output) + + output = self._run_assistant_graph( + graph, + conversation=self.conversation, + is_new_conversation=False, + ) + self.assertNotEqual(output[0][0], "conversation") diff --git a/ee/hogai/test/test_utils.py b/ee/hogai/test/test_utils.py index 42e54d058c556..8c32471c88508 100644 --- a/ee/hogai/test/test_utils.py +++ b/ee/hogai/test/test_utils.py @@ -1,6 +1,4 @@ -from langchain_core.messages import HumanMessage as LangchainHumanMessage - -from ee.hogai.utils import filter_visualization_conversation, merge_human_messages +from ee.hogai.utils.helpers import filter_messages from posthog.schema import ( AssistantMessage, AssistantTrendsQuery, @@ -13,40 +11,29 @@ class TestTrendsUtils(BaseTest): - def test_merge_human_messages(self): - res = merge_human_messages( - [ - LangchainHumanMessage(content="Text"), - LangchainHumanMessage(content="Text"), - LangchainHumanMessage(content="Te"), - LangchainHumanMessage(content="xt"), - ] - ) - self.assertEqual(len(res), 1) - self.assertEqual(res, [LangchainHumanMessage(content="Text\nTe\nxt")]) - - def test_filter_trends_conversation(self): - human_messages, visualization_messages = filter_visualization_conversation( + def test_filters_and_merges_human_messages(self): + conversation = [ + HumanMessage(content="Text"), + FailureMessage(content="Error"), + HumanMessage(content="Text"), + VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="plan"), + HumanMessage(content="Text2"), + VisualizationMessage(answer=None, plan="plan"), + ] + messages = filter_messages(conversation) + self.assertEqual(len(messages), 4) + self.assertEqual( [ - HumanMessage(content="Text"), - FailureMessage(content="Error"), - 
HumanMessage(content="Text"), + HumanMessage(content="Text\nText"), VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="plan"), HumanMessage(content="Text2"), VisualizationMessage(answer=None, plan="plan"), - ] - ) - self.assertEqual(len(human_messages), 2) - self.assertEqual(len(visualization_messages), 1) - self.assertEqual( - human_messages, [LangchainHumanMessage(content="Text"), LangchainHumanMessage(content="Text2")] - ) - self.assertEqual( - visualization_messages, [VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="plan")] + ], + messages, ) def test_filters_typical_conversation(self): - human_messages, visualization_messages = filter_visualization_conversation( + messages = filter_messages( [ HumanMessage(content="Question 1"), RouterMessage(content="trends"), @@ -58,15 +45,30 @@ def test_filters_typical_conversation(self): AssistantMessage(content="Summary 2"), ] ) - self.assertEqual(len(human_messages), 2) - self.assertEqual(len(visualization_messages), 2) - self.assertEqual( - human_messages, [LangchainHumanMessage(content="Question 1"), LangchainHumanMessage(content="Question 2")] - ) + self.assertEqual(len(messages), 6) self.assertEqual( - visualization_messages, + messages, [ + HumanMessage(content="Question 1"), VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 1"), + AssistantMessage(content="Summary 1"), + HumanMessage(content="Question 2"), VisualizationMessage(answer=AssistantTrendsQuery(series=[]), plan="Plan 2"), + AssistantMessage(content="Summary 2"), + ], + ) + + def test_joins_human_messages(self): + messages = filter_messages( + [ + HumanMessage(content="Question 1"), + HumanMessage(content="Question 2"), + ] + ) + self.assertEqual(len(messages), 1) + self.assertEqual( + messages, + [ + HumanMessage(content="Question 1\nQuestion 2"), ], ) diff --git a/ee/hogai/trends/nodes.py b/ee/hogai/trends/nodes.py index b6b33cf6d8354..e430b4036e043 100644 --- a/ee/hogai/trends/nodes.py +++ b/ee/hogai/trends/nodes.py @@ -6,12 +6,12 @@ from ee.hogai.taxonomy_agent.nodes import TaxonomyAgentPlannerNode, TaxonomyAgentPlannerToolsNode from ee.hogai.trends.prompts import REACT_SYSTEM_PROMPT, TRENDS_SYSTEM_PROMPT from ee.hogai.trends.toolkit import TRENDS_SCHEMA, TrendsTaxonomyAgentToolkit -from ee.hogai.utils import AssistantState +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import AssistantTrendsQuery class TrendsPlannerNode(TaxonomyAgentPlannerNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: toolkit = TrendsTaxonomyAgentToolkit(self._team) prompt = ChatPromptTemplate.from_messages( [ @@ -23,7 +23,7 @@ def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: class TrendsPlannerToolsNode(TaxonomyAgentPlannerToolsNode): - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: toolkit = TrendsTaxonomyAgentToolkit(self._team) return super()._run_with_toolkit(state, toolkit, config=config) @@ -36,7 +36,7 @@ class TrendsGeneratorNode(SchemaGeneratorNode[AssistantTrendsQuery]): OUTPUT_MODEL = TrendsSchemaGeneratorOutput OUTPUT_SCHEMA = TRENDS_SCHEMA - def run(self, state: AssistantState, config: RunnableConfig) -> AssistantState: + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: prompt = 
ChatPromptTemplate.from_messages( [ ("system", TRENDS_SYSTEM_PROMPT), diff --git a/ee/hogai/trends/prompts.py b/ee/hogai/trends/prompts.py index 2ac9496480cdd..dcc1daeaa5a00 100644 --- a/ee/hogai/trends/prompts.py +++ b/ee/hogai/trends/prompts.py @@ -12,6 +12,8 @@ {{react_format}} +{{react_human_in_the_loop}} + Below you will find information on how to correctly discover the taxonomy of the user's data. diff --git a/ee/hogai/trends/test/test_nodes.py b/ee/hogai/trends/test/test_nodes.py index 44973b3195377..369ce8bc9b292 100644 --- a/ee/hogai/trends/test/test_nodes.py +++ b/ee/hogai/trends/test/test_nodes.py @@ -4,6 +4,7 @@ from langchain_core.runnables import RunnableLambda from ee.hogai.trends.nodes import TrendsGeneratorNode, TrendsSchemaGeneratorOutput +from ee.hogai.utils.types import AssistantState, PartialAssistantState from posthog.schema import ( AssistantTrendsQuery, HumanMessage, @@ -17,6 +18,7 @@ class TestTrendsGeneratorNode(ClickhouseTestMixin, APIBaseTest): maxDiff = None def setUp(self): + super().setUp() self.schema = AssistantTrendsQuery(series=[]) def test_node_runs(self): @@ -26,16 +28,16 @@ def test_node_runs(self): lambda _: TrendsSchemaGeneratorOutput(query=self.schema).model_dump() ) new_state = node.run( - { - "messages": [HumanMessage(content="Text")], - "plan": "Plan", - }, + AssistantState( + messages=[HumanMessage(content="Text")], + plan="Plan", + ), {}, ) self.assertEqual( new_state, - { - "messages": [VisualizationMessage(answer=self.schema, plan="Plan", done=True)], - "intermediate_steps": None, - }, + PartialAssistantState( + messages=[VisualizationMessage(answer=self.schema, plan="Plan", id=new_state.messages[0].id)], + intermediate_steps=None, + ), ) diff --git a/ee/hogai/trends/toolkit.py b/ee/hogai/trends/toolkit.py index d69830d2f2cd6..5fd7a35f0f18a 100644 --- a/ee/hogai/trends/toolkit.py +++ b/ee/hogai/trends/toolkit.py @@ -1,8 +1,6 @@ from ee.hogai.taxonomy_agent.toolkit import TaxonomyAgentToolkit, ToolkitTool -from ee.hogai.utils import dereference_schema -from posthog.schema import ( - AssistantTrendsQuery, -) +from ee.hogai.utils.helpers import dereference_schema +from posthog.schema import AssistantTrendsQuery class TrendsTaxonomyAgentToolkit(TaxonomyAgentToolkit): diff --git a/ee/hogai/utils.py b/ee/hogai/utils.py deleted file mode 100644 index 559a369df83c8..0000000000000 --- a/ee/hogai/utils.py +++ /dev/null @@ -1,117 +0,0 @@ -import operator -from abc import ABC, abstractmethod -from collections.abc import Sequence -from enum import StrEnum -from typing import Annotated, Optional, TypedDict, Union - -from jsonref import replace_refs -from langchain_core.agents import AgentAction -from langchain_core.messages import ( - HumanMessage as LangchainHumanMessage, - merge_message_runs, -) -from langchain_core.runnables import RunnableConfig -from langgraph.graph import END, START -from pydantic import BaseModel, Field - -from posthog.models.team.team import Team -from posthog.schema import ( - AssistantMessage, - FailureMessage, - HumanMessage, - ReasoningMessage, - RootAssistantMessage, - RouterMessage, - VisualizationMessage, -) - -AssistantMessageUnion = Union[ - AssistantMessage, HumanMessage, VisualizationMessage, FailureMessage, RouterMessage, ReasoningMessage -] - - -class Conversation(BaseModel): - messages: list[RootAssistantMessage] = Field(..., min_length=1, max_length=50) - session_id: str - - -class AssistantState(TypedDict, total=False): - messages: Annotated[Sequence[AssistantMessageUnion], operator.add] - intermediate_steps: 
Optional[list[tuple[AgentAction, Optional[str]]]] - plan: Optional[str] - - -class AssistantNodeName(StrEnum): - START = START - END = END - ROUTER = "router" - TRENDS_PLANNER = "trends_planner" - TRENDS_PLANNER_TOOLS = "trends_planner_tools" - TRENDS_GENERATOR = "trends_generator" - TRENDS_GENERATOR_TOOLS = "trends_generator_tools" - FUNNEL_PLANNER = "funnel_planner" - FUNNEL_PLANNER_TOOLS = "funnel_planner_tools" - FUNNEL_GENERATOR = "funnel_generator" - FUNNEL_GENERATOR_TOOLS = "funnel_generator_tools" - SUMMARIZER = "summarizer" - - -class AssistantNode(ABC): - _team: Team - - def __init__(self, team: Team): - self._team = team - - @abstractmethod - def run(cls, state: AssistantState, config: RunnableConfig) -> AssistantState: - raise NotImplementedError - - -def remove_line_breaks(line: str) -> str: - return line.replace("\n", " ") - - -def merge_human_messages(messages: list[LangchainHumanMessage]) -> list[LangchainHumanMessage]: - """ - Filters out duplicated human messages and merges them into one message. - """ - contents = set() - filtered_messages = [] - for message in messages: - if message.content in contents: - continue - contents.add(message.content) - filtered_messages.append(message) - return merge_message_runs(filtered_messages) - - -def filter_visualization_conversation( - messages: Sequence[AssistantMessageUnion], -) -> tuple[list[LangchainHumanMessage], list[VisualizationMessage]]: - """ - Splits, filters and merges the message history to be consumable by agents. Returns human and visualization messages. - """ - stack: list[LangchainHumanMessage] = [] - human_messages: list[LangchainHumanMessage] = [] - visualization_messages: list[VisualizationMessage] = [] - - for message in messages: - if isinstance(message, HumanMessage): - stack.append(LangchainHumanMessage(content=message.content)) - elif isinstance(message, VisualizationMessage) and message.answer: - if stack: - human_messages += merge_human_messages(stack) - stack = [] - visualization_messages.append(message) - - if stack: - human_messages += merge_human_messages(stack) - - return human_messages, visualization_messages - - -def dereference_schema(schema: dict) -> dict: - new_schema: dict = replace_refs(schema, proxies=False, lazy_load=False) - if "$defs" in new_schema: - new_schema.pop("$defs") - return new_schema diff --git a/ee/hogai/utils/__init__.py b/ee/hogai/utils/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/ee/hogai/utils/helpers.py b/ee/hogai/utils/helpers.py new file mode 100644 index 0000000000000..4fc8cf3b5d6a0 --- /dev/null +++ b/ee/hogai/utils/helpers.py @@ -0,0 +1,79 @@ +from collections.abc import Sequence +from typing import Optional, TypeVar, Union + +from jsonref import replace_refs +from langchain_core.messages import ( + HumanMessage as LangchainHumanMessage, + merge_message_runs, +) + +from posthog.schema import ( + AssistantMessage, + HumanMessage, + VisualizationMessage, +) + +from .types import AIMessageUnion, AssistantMessageUnion + + +def remove_line_breaks(line: str) -> str: + return line.replace("\n", " ") + + +def filter_messages( + messages: Sequence[AssistantMessageUnion], + entity_filter: Union[tuple[type[AIMessageUnion], ...], type[AIMessageUnion]] = ( + AssistantMessage, + VisualizationMessage, + ), +) -> list[AssistantMessageUnion]: + """ + Filters and merges the message history to be consumable by agents. Returns human and AI messages. 
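+ + AI messages whose type is not in entity_filter are dropped, and runs of human messages are merged into one, their contents joined with newlines: for example, [Human("A"), Router("trends"), Human("B"), Assistant("C")] becomes [Human("A\nB"), Assistant("C")].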
+ """ + stack: list[LangchainHumanMessage] = [] + filtered_messages: list[AssistantMessageUnion] = [] + + def _merge_stack(stack: list[LangchainHumanMessage]) -> list[HumanMessage]: + return [ + HumanMessage(content=langchain_message.content, id=langchain_message.id) + for langchain_message in merge_message_runs(stack) + ] + + for message in messages: + if isinstance(message, HumanMessage): + stack.append(LangchainHumanMessage(content=message.content, id=message.id)) + elif isinstance(message, entity_filter): + if stack: + filtered_messages += _merge_stack(stack) + stack = [] + filtered_messages.append(message) + + if stack: + filtered_messages += _merge_stack(stack) + + return filtered_messages + + + T = TypeVar("T", bound=AssistantMessageUnion) + + + def find_last_message_of_type(messages: Sequence[AssistantMessageUnion], message_type: type[T]) -> Optional[T]: + return next((msg for msg in reversed(messages) if isinstance(msg, message_type)), None) + + + def slice_messages_to_conversation_start( + messages: Sequence[AssistantMessageUnion], start_id: Optional[str] = None + ) -> Sequence[AssistantMessageUnion]: + result = [] + for msg in messages: + result.append(msg) + if msg.id == start_id: + break + return result + + + def dereference_schema(schema: dict) -> dict: + new_schema: dict = replace_refs(schema, proxies=False, lazy_load=False) + if "$defs" in new_schema: + new_schema.pop("$defs") + return new_schema diff --git a/ee/hogai/utils/nodes.py b/ee/hogai/utils/nodes.py new file mode 100644 index 0000000000000..6a4358243b666 --- /dev/null +++ b/ee/hogai/utils/nodes.py @@ -0,0 +1,18 @@ +from abc import ABC, abstractmethod + +from langchain_core.runnables import RunnableConfig + +from posthog.models.team.team import Team + +from .types import AssistantState, PartialAssistantState + + +class AssistantNode(ABC): + _team: Team + + def __init__(self, team: Team): + self._team = team + + @abstractmethod + def run(self, state: AssistantState, config: RunnableConfig) -> PartialAssistantState: + raise NotImplementedError diff --git a/ee/hogai/utils/state.py b/ee/hogai/utils/state.py new file mode 100644 index 0000000000000..3392f3362adb9 --- /dev/null +++ b/ee/hogai/utils/state.py @@ -0,0 +1,70 @@ +from typing import Any, Literal, TypedDict, TypeGuard, Union + +from langchain_core.messages import AIMessageChunk + +from ee.hogai.utils.types import AssistantNodeName, AssistantState, PartialAssistantState + +# A state update can carry either a partial state or one of LangGraph's reserved dataclasses like Interrupt. +GraphValueUpdate = dict[AssistantNodeName, dict[Any, Any] | Any] + +GraphValueUpdateTuple = tuple[Literal["updates"], GraphValueUpdate] + + +def is_value_update(update: list[Any]) -> TypeGuard[GraphValueUpdateTuple]: + """ + Transition between nodes. + + The update payload maps each node name to a PartialAssistantState, an Interrupt, or another LangGraph reserved dataclass.
+ """ + return len(update) == 2 and update[0] == "updates" + + + def validate_value_update(update: GraphValueUpdate) -> dict[AssistantNodeName, PartialAssistantState | Any]: + validated_update = {} + for node_name, value in update.items(): + if isinstance(value, dict): + validated_update[node_name] = PartialAssistantState.model_validate(value) + else: + validated_update[node_name] = value + return validated_update + + + class LangGraphState(TypedDict): + langgraph_node: AssistantNodeName + + + GraphMessageUpdateTuple = tuple[Literal["messages"], tuple[Union[AIMessageChunk, Any], LangGraphState]] + + + def is_message_update(update: list[Any]) -> TypeGuard[GraphMessageUpdateTuple]: + """ + Streaming of messages. + """ + return len(update) == 2 and update[0] == "messages" + + + GraphStateUpdateTuple = tuple[Literal["values"], dict[Any, Any]] + + + def is_state_update(update: list[Any]) -> TypeGuard[GraphStateUpdateTuple]: + """ + Update of the state. Returns a full state. + """ + return len(update) == 2 and update[0] == "values" + + + def validate_state_update(state_update: dict[Any, Any]) -> AssistantState: + return AssistantState.model_validate(state_update) + + + GraphTaskStartedUpdateTuple = tuple[Literal["debug"], tuple[Union[AIMessageChunk, Any], LangGraphState]] + + + def is_task_started_update( + update: list[Any], + ) -> TypeGuard[GraphTaskStartedUpdateTuple]: + """ + A task within a node has started. + """ + return len(update) == 2 and update[0] == "debug" and update[1]["type"] == "task" diff --git a/ee/hogai/utils/types.py b/ee/hogai/utils/types.py new file mode 100644 index 0000000000000..2df027b6f85af --- /dev/null +++ b/ee/hogai/utils/types.py @@ -0,0 +1,52 @@ +import operator +from collections.abc import Sequence +from enum import StrEnum +from typing import Annotated, Optional, Union + +from langchain_core.agents import AgentAction +from langgraph.graph import END, START +from pydantic import BaseModel, Field + +from posthog.schema import ( + AssistantMessage, + FailureMessage, + HumanMessage, + ReasoningMessage, + RouterMessage, + VisualizationMessage, +) + +AIMessageUnion = Union[AssistantMessage, VisualizationMessage, FailureMessage, RouterMessage, ReasoningMessage] + AssistantMessageUnion = Union[HumanMessage, AIMessageUnion] + + + class _SharedAssistantState(BaseModel): + intermediate_steps: Optional[list[tuple[AgentAction, Optional[str]]]] = Field(default=None) + start_id: Optional[str] = Field(default=None) + """ + The ID of the message from which the conversation started.
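+ Helpers like slice_messages_to_conversation_start use it to drop any messages that come after this one.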
+ """ + plan: Optional[str] = Field(default=None) + + +class AssistantState(_SharedAssistantState): + messages: Annotated[Sequence[AssistantMessageUnion], operator.add] + + +class PartialAssistantState(_SharedAssistantState): + messages: Optional[Annotated[Sequence[AssistantMessageUnion], operator.add]] = Field(default=None) + + +class AssistantNodeName(StrEnum): + START = START + END = END + ROUTER = "router" + TRENDS_PLANNER = "trends_planner" + TRENDS_PLANNER_TOOLS = "trends_planner_tools" + TRENDS_GENERATOR = "trends_generator" + TRENDS_GENERATOR_TOOLS = "trends_generator_tools" + FUNNEL_PLANNER = "funnel_planner" + FUNNEL_PLANNER_TOOLS = "funnel_planner_tools" + FUNNEL_GENERATOR = "funnel_generator" + FUNNEL_GENERATOR_TOOLS = "funnel_generator_tools" + SUMMARIZER = "summarizer" diff --git a/ee/migrations/0018_conversation_conversationcheckpoint_and_more.py b/ee/migrations/0018_conversation_conversationcheckpoint_and_more.py new file mode 100644 index 0000000000000..ec48cc780ad57 --- /dev/null +++ b/ee/migrations/0018_conversation_conversationcheckpoint_and_more.py @@ -0,0 +1,147 @@ +# Generated by Django 4.2.15 on 2024-12-11 15:51 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import posthog.models.utils + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0528_project_field_in_taxonomy"), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("ee", "0017_accesscontrol_and_more"), + ] + + operations = [ + migrations.CreateModel( + name="Conversation", + fields=[ + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="ConversationCheckpoint", + fields=[ + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ( + "checkpoint_ns", + models.TextField( + default="", + help_text='Checkpoint namespace. Denotes the path to the subgraph node the checkpoint originates from, separated by `|` character, e.g. `"child|grandchild"`. Defaults to "" (root graph).', + ), + ), + ("checkpoint", models.JSONField(help_text="Serialized checkpoint data.", null=True)), + ("metadata", models.JSONField(help_text="Serialized checkpoint metadata.", null=True)), + ( + "parent_checkpoint", + models.ForeignKey( + help_text="Parent checkpoint ID.", + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="children", + to="ee.conversationcheckpoint", + ), + ), + ( + "thread", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, related_name="checkpoints", to="ee.conversation" + ), + ), + ], + ), + migrations.CreateModel( + name="ConversationCheckpointWrite", + fields=[ + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ("task_id", models.UUIDField(help_text="Identifier for the task creating the checkpoint write.")), + ( + "idx", + models.IntegerField( + help_text="Index of the checkpoint write. It is an integer value where negative numbers are reserved for special cases, such as node interruption." 
+ ), + ), + ( + "channel", + models.TextField( + help_text="An arbitrary string defining the channel name. For example, it can be a node name or a reserved LangGraph's enum." + ), + ), + ("type", models.TextField(help_text="Type of the serialized blob. For example, `json`.", null=True)), + ("blob", models.BinaryField(null=True)), + ( + "checkpoint", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="writes", + to="ee.conversationcheckpoint", + ), + ), + ], + ), + migrations.CreateModel( + name="ConversationCheckpointBlob", + fields=[ + ( + "id", + models.UUIDField( + default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + ), + ), + ( + "channel", + models.TextField( + help_text="An arbitrary string defining the channel name. For example, it can be a node name or a reserved LangGraph's enum." + ), + ), + ("version", models.TextField(help_text="Monotonically increasing version of the channel.")), + ("type", models.TextField(help_text="Type of the serialized blob. For example, `json`.", null=True)), + ("blob", models.BinaryField(null=True)), + ( + "checkpoint", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="blobs", + to="ee.conversationcheckpoint", + ), + ), + ], + ), + migrations.AddConstraint( + model_name="conversationcheckpointwrite", + constraint=models.UniqueConstraint( + fields=("checkpoint_id", "task_id", "idx"), name="unique_checkpoint_write" + ), + ), + migrations.AddConstraint( + model_name="conversationcheckpointblob", + constraint=models.UniqueConstraint( + fields=("checkpoint_id", "channel", "version"), name="unique_checkpoint_blob" + ), + ), + migrations.AddConstraint( + model_name="conversationcheckpoint", + constraint=models.UniqueConstraint(fields=("id", "checkpoint_ns", "thread"), name="unique_checkpoint"), + ), + ] diff --git a/ee/migrations/max_migration.txt b/ee/migrations/max_migration.txt index 449d87290c304..fb889f1cc34cf 100644 --- a/ee/migrations/max_migration.txt +++ b/ee/migrations/max_migration.txt @@ -1 +1 @@ -0017_accesscontrol_and_more +0018_conversation_conversationcheckpoint_and_more diff --git a/ee/models/__init__.py b/ee/models/__init__.py index df7cfcba704e6..2067d11f7618f 100644 --- a/ee/models/__init__.py +++ b/ee/models/__init__.py @@ -1,3 +1,4 @@ +from .assistant import Conversation, ConversationCheckpoint, ConversationCheckpointBlob, ConversationCheckpointWrite from .dashboard_privilege import DashboardPrivilege from .event_definition import EnterpriseEventDefinition from .explicit_team_membership import ExplicitTeamMembership @@ -10,7 +11,11 @@ __all__ = [ "AccessControl", + "ConversationCheckpoint", + "ConversationCheckpointBlob", + "ConversationCheckpointWrite", "DashboardPrivilege", + "Conversation", "EnterpriseEventDefinition", "EnterprisePropertyDefinition", "ExplicitTeamMembership", diff --git a/ee/models/assistant.py b/ee/models/assistant.py new file mode 100644 index 0000000000000..390a7ab7a117f --- /dev/null +++ b/ee/models/assistant.py @@ -0,0 +1,83 @@ +from collections.abc import Iterable + +from django.db import models +from langgraph.checkpoint.serde.types import TASKS + +from posthog.models.team.team import Team +from posthog.models.user import User +from posthog.models.utils import UUIDModel + + +class Conversation(UUIDModel): + user = models.ForeignKey(User, on_delete=models.CASCADE) + team = models.ForeignKey(Team, on_delete=models.CASCADE) + + +class ConversationCheckpoint(UUIDModel): + thread = models.ForeignKey(Conversation, 
on_delete=models.CASCADE, related_name="checkpoints") + checkpoint_ns = models.TextField( + default="", + help_text='Checkpoint namespace. Denotes the path to the subgraph node the checkpoint originates from, separated by `|` character, e.g. `"child|grandchild"`. Defaults to "" (root graph).', + ) + parent_checkpoint = models.ForeignKey( + "self", null=True, on_delete=models.CASCADE, related_name="children", help_text="Parent checkpoint ID." + ) + checkpoint = models.JSONField(null=True, help_text="Serialized checkpoint data.") + metadata = models.JSONField(null=True, help_text="Serialized checkpoint metadata.") + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["id", "checkpoint_ns", "thread"], + name="unique_checkpoint", + ) + ] + + @property + def pending_sends(self) -> Iterable["ConversationCheckpointWrite"]: + if self.parent_checkpoint is None: + return [] + return self.parent_checkpoint.writes.filter(channel=TASKS).order_by("task_id", "idx") + + @property + def pending_writes(self) -> Iterable["ConversationCheckpointWrite"]: + return self.writes.order_by("idx", "task_id") + + +class ConversationCheckpointBlob(UUIDModel): + checkpoint = models.ForeignKey(ConversationCheckpoint, on_delete=models.CASCADE, related_name="blobs") + channel = models.TextField( + help_text="An arbitrary string defining the channel name. For example, it can be a node name or a reserved LangGraph's enum." + ) + version = models.TextField(help_text="Monotonically increasing version of the channel.") + type = models.TextField(null=True, help_text="Type of the serialized blob. For example, `json`.") + blob = models.BinaryField(null=True) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["checkpoint_id", "channel", "version"], + name="unique_checkpoint_blob", + ) + ] + + +class ConversationCheckpointWrite(UUIDModel): + checkpoint = models.ForeignKey(ConversationCheckpoint, on_delete=models.CASCADE, related_name="writes") + task_id = models.UUIDField(help_text="Identifier for the task creating the checkpoint write.") + idx = models.IntegerField( + help_text="Index of the checkpoint write. It is an integer value where negative numbers are reserved for special cases, such as node interruption." + ) + channel = models.TextField( + help_text="An arbitrary string defining the channel name. For example, it can be a node name or a reserved LangGraph's enum." + ) + type = models.TextField(null=True, help_text="Type of the serialized blob. 
For example, `json`.") + blob = models.BinaryField(null=True) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["checkpoint_id", "task_id", "idx"], + name="unique_checkpoint_write", + ) + ] diff --git a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr deleted file mode 100644 index 389933177e2ca..0000000000000 --- a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr +++ /dev/null @@ -1,1649 +0,0 @@ -# serializer version: 1 -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_0_test_poe_v1_still_falls_back_to_person_subquery - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, - max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, - dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_13)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 50000 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_1_test_poe_being_unavailable_we_fall_back_to_person_id_overrides - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - 
any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, - max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, - dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_15)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, 
%(hogql_val_16)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 50000 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_2_test_poe_being_unavailable_we_fall_back_to_person_subquery_but_still_use_mat_props - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, - max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, - dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 
13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_15)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_16)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 50000 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_3_test_allow_denormalised_props_fix_does_not_stop_all_poe_processing - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, - max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, - dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_13)s), 0)) - GROUP BY events.`$session_id` - HAVING 1))) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 50000 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_effect_of_poe_settings_on_query_generated_4_test_poe_v2_available_person_properties_are_used_in_replay_listing - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - 
-         min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time,
-         dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_13)s), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 50000
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_00_poe_v2_and_materialized_columns_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_00_poe_v2_and_materialized_columns_allowed_with_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_01_poe_v2_and_materialized_columns_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_01_poe_v2_and_materialized_columns_allowed_without_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_02_poe_v2_and_materialized_columns_off_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_02_poe_v2_and_materialized_columns_off_with_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_03_poe_v2_and_materialized_columns_off_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_03_poe_v2_and_materialized_columns_off_without_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_04_poe_off_and_materialized_columns_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_04_poe_off_and_materialized_columns_allowed_with_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_05_poe_off_and_materialized_columns_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_05_poe_off_and_materialized_columns_allowed_without_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_06_poe_off_and_materialized_columns_not_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_06_poe_off_and_materialized_columns_not_allowed_with_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_07_poe_off_and_materialized_columns_not_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_07_poe_off_and_materialized_columns_not_allowed_without_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     LEFT OUTER JOIN
-       (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
-        FROM person_distinct_id_overrides
-        WHERE equals(person_distinct_id_overrides.team_id, 99999)
-        GROUP BY person_distinct_id_overrides.distinct_id
-        HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
-     LEFT JOIN
-       (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email
-        FROM person
-        WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version),
-          (SELECT person.id AS id, max(person.version) AS version
-           FROM person
-           WHERE equals(person.team_id, 99999)
-           GROUP BY person.id
-           HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_08_poe_v1_and_materialized_columns_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_08_poe_v1_and_materialized_columns_allowed_with_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_09_poe_v1_and_materialized_columns_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_09_poe_v1_and_materialized_columns_allowed_without_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_10_poe_v1_and_not_materialized_columns_not_allowed_with_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_10_poe_v1_and_not_materialized_columns_not_allowed_with_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_11_poe_v1_and_not_materialized_columns_not_allowed_without_materialization
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
-                    max_execution_time=60,
-                    allow_experimental_object_type=1,
-                    format_csv_allow_double_quotes=0,
-                    max_ast_elements=4000000,
-                    max_expanded_ast_elements=4000000,
-                    max_bytes_before_external_group_by=0,
-                    allow_experimental_analyzer=0
-  '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_event_filter_with_person_properties_materialized_11_poe_v1_and_not_materialized_columns_not_allowed_without_materialization.1
-  '''
-  SELECT s.session_id AS session_id,
-         any(s.team_id),
-         any(s.distinct_id),
-         min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
-         max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
-         dateDiff('SECOND', start_time, end_time) AS duration,
-         argMinMerge(s.first_url) AS first_url,
-         sum(s.click_count) AS click_count,
-         sum(s.keypress_count) AS keypress_count,
-         sum(s.mouse_activity_count) AS mouse_activity_count,
-         divide(sum(s.active_milliseconds), 1000) AS active_seconds,
-         minus(duration, active_seconds) AS inactive_seconds,
-         sum(s.console_log_count) AS console_log_count,
-         sum(s.console_warn_count) AS console_warn_count,
-         sum(s.console_error_count) AS console_error_count,
-         ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
-         round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
-  FROM session_replay_events AS s
-  WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
-    (SELECT events.`$session_id` AS session_id
-     FROM events
-     WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
-     GROUP BY events.`$session_id`
-     HAVING 1)))
-  GROUP BY s.session_id
-  HAVING 1
-  ORDER BY start_time DESC
-  LIMIT 51
-  OFFSET 0 SETTINGS readonly=2,
max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_00_poe_v2_and_materialized_columns_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_01_poe_v2_and_materialized_columns_allowed_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS 
keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_02_poe_v2_and_materialized_columns_off_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), 
sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_03_poe_v2_and_materialized_columns_off_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_04_poe_off_and_materialized_columns_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_05_poe_off_and_materialized_columns_allowed_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_06_poe_off_and_materialized_columns_not_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 
00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_07_poe_off_and_materialized_columns_not_allowed_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_08_poe_v1_and_materialized_columns_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY 
start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_09_poe_v1_and_materialized_columns_allowed_without_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - 
allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0, - allow_experimental_analyzer=0 - ''' -# --- -# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_10_poe_v1_and_not_materialized_columns_not_allowed_with_materialization - ''' - SELECT s.session_id AS session_id, - any(s.team_id), - any(s.distinct_id), - min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, - max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, - dateDiff('SECOND', start_time, end_time) AS duration, - argMinMerge(s.first_url) AS first_url, - sum(s.click_count) AS click_count, - sum(s.keypress_count) AS keypress_count, - sum(s.mouse_activity_count) AS mouse_activity_count, - divide(sum(s.active_milliseconds), 1000) AS active_seconds, - minus(duration, active_seconds) AS inactive_seconds, - sum(s.console_log_count) AS console_log_count, - sum(s.console_warn_count) AS console_warn_count, - sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, - round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score - FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) - GROUP BY s.session_id - HAVING 1 - ORDER BY start_time DESC - LIMIT 51 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, 
- max_expanded_ast_elements=4000000,
- max_bytes_before_external_group_by=0,
- allow_experimental_analyzer=0
- '''
-# ---
-# name: TestClickhouseSessionRecordingsListFromFilters.test_person_id_filter_11_poe_v1_and_not_materialized_columns_not_allowed_without_materialization
- '''
- SELECT s.session_id AS session_id,
- any(s.team_id),
- any(s.distinct_id),
- min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time,
- max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time,
- dateDiff('SECOND', start_time, end_time) AS duration,
- argMinMerge(s.first_url) AS first_url,
- sum(s.click_count) AS click_count,
- sum(s.keypress_count) AS keypress_count,
- sum(s.mouse_activity_count) AS mouse_activity_count,
- divide(sum(s.active_milliseconds), 1000) AS active_seconds,
- minus(duration, active_seconds) AS inactive_seconds,
- sum(s.console_log_count) AS console_log_count,
- sum(s.console_warn_count) AS console_warn_count,
- sum(s.console_error_count) AS console_error_count,
- ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
- round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
- FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT DISTINCT events.`$session_id` AS `$session_id`
- FROM events
- LEFT OUTER JOIN
- (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
- FROM person_distinct_id_overrides
- WHERE equals(person_distinct_id_overrides.team_id, 99999)
- GROUP BY person_distinct_id_overrides.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
- WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
- GROUP BY s.session_id
- HAVING 1
- ORDER BY start_time DESC
- LIMIT 51
- OFFSET 0 SETTINGS readonly=2,
- max_execution_time=60,
- allow_experimental_object_type=1,
- format_csv_allow_double_quotes=0,
- max_ast_elements=4000000,
- max_expanded_ast_elements=4000000,
- max_bytes_before_external_group_by=0,
- allow_experimental_analyzer=0
- '''
-# ---
diff --git a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr
index 7c4d474405fc4..97bc6424e1cbd 100644
--- a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr
+++ b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr
@@ -19,12 +19,12 @@
 ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
 round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
 FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT events.`$session_id` AS session_id
- FROM events
- WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
- GROUP BY events.`$session_id`
- HAVING 1)))
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
 GROUP BY s.session_id
 HAVING 1
 ORDER BY start_time DESC
@@ -51,24 +51,24 @@
 ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
 round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
 FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT events.`$session_id` AS session_id
- FROM events
- LEFT OUTER JOIN
- (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
- FROM person_distinct_id_overrides
- WHERE equals(person_distinct_id_overrides.team_id, 99999)
- GROUP BY person_distinct_id_overrides.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id
- FROM person
- WHERE equals(person.team_id, 99999)
- GROUP BY person.id
- HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
- WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0))
- GROUP BY events.`$session_id`
- HAVING 1)))
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ LEFT OUTER JOIN
+ (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+ FROM person_distinct_id_overrides
+ WHERE equals(person_distinct_id_overrides.team_id, 99999)
+ GROUP BY person_distinct_id_overrides.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id
+ FROM person
+ WHERE equals(person.team_id, 99999)
+ GROUP BY person.id
+ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
 GROUP BY s.session_id
 HAVING 1
 ORDER BY start_time DESC
@@ -95,24 +95,24 @@
 ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
 round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
 FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT events.`$session_id` AS session_id
- FROM events
- LEFT OUTER JOIN
- (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
- FROM person_distinct_id_overrides
- WHERE equals(person_distinct_id_overrides.team_id, 99999)
- GROUP BY person_distinct_id_overrides.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id
- FROM person
- WHERE equals(person.team_id, 99999)
- GROUP BY person.id
- HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
- WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0))
- GROUP BY events.`$session_id`
- HAVING 1)))
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ LEFT OUTER JOIN
+ (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id
+ FROM person_distinct_id_overrides
+ WHERE equals(person_distinct_id_overrides.team_id, 99999)
+ GROUP BY person_distinct_id_overrides.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id)
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id
+ FROM person
+ WHERE equals(person.team_id, 99999)
+ GROUP BY person.id
+ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id)
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
 GROUP BY s.session_id
 HAVING 1
 ORDER BY start_time DESC
@@ -139,12 +139,12 @@
 ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
 round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
 FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT events.`$session_id` AS session_id
- FROM events
- WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
- GROUP BY events.`$session_id`
- HAVING 1)))
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
 GROUP BY s.session_id
 HAVING 1
 ORDER BY start_time DESC
@@ -171,12 +171,12 @@
 ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
 round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
 FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT events.`$session_id` AS session_id
- FROM events
- WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
- GROUP BY events.`$session_id`
- HAVING 1)))
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
 GROUP BY s.session_id
 HAVING 1
 ORDER BY start_time DESC
@@ -238,12 +238,12 @@
 ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
 round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
 FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT events.`$session_id` AS session_id
- FROM events
- WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
- GROUP BY events.`$session_id`
- HAVING 1)))
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
 GROUP BY s.session_id
 HAVING 1
 ORDER BY start_time DESC
@@ -313,12 +313,12 @@
 ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
 round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
 FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT events.`$session_id` AS session_id
- FROM events
- WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
- GROUP BY events.`$session_id`
- HAVING 1)))
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
 GROUP BY s.session_id
 HAVING 1
 ORDER BY start_time DESC
@@ -388,12 +388,12 @@
 ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
 round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
 FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT events.`$session_id` AS session_id
- FROM events
- WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
- GROUP BY events.`$session_id`
- HAVING 1)))
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id,
+ (SELECT events.`$session_id` AS session_id
+ FROM events
+ WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
+ GROUP BY events.`$session_id`
+ HAVING 1)))
 GROUP BY s.session_id
 HAVING 1
 ORDER BY start_time DESC
@@ -463,12 +463,12 @@
 ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing,
 round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score
 FROM session_replay_events AS s
- WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id,
- (SELECT events.`$session_id` AS session_id
- FROM events
- WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0))
- GROUP BY events.`$session_id`
- HAVING 1)))
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0),
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -538,27 +538,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 
99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -628,27 +628,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE 
equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), 
now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -718,27 +718,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + 
FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -808,27 +808,27 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email - FROM person - WHERE 
and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 99999) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -898,12 +898,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -973,12 +973,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 
13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1048,12 +1048,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 
'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1123,12 +1123,12 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT events.`$session_id` AS session_id - FROM events - WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) - GROUP BY events.`$session_id` - HAVING 1))) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1163,10 +1163,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT 
DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1201,10 +1201,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1239,10 +1239,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1277,10 +1277,10 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1315,16 +1315,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), 
sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1359,16 +1359,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 
'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1403,16 +1403,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, 
person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1447,16 +1447,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), 
globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1491,16 +1491,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1535,16 +1535,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), 
ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1579,16 +1579,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS 
person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -1623,16 +1623,16 @@ ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), 
sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, - (SELECT DISTINCT events.`$session_id` AS `$session_id` - FROM events - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), globalIn(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0))
    GROUP BY s.session_id
    HAVING 1
    ORDER BY start_time DESC
diff --git a/ee/session_recordings/queries/test/test_session_recording_list_from_filters.py b/ee/session_recordings/queries/test/test_session_recording_list_from_filters.py
deleted file mode 100644
index 391d12071966f..0000000000000
--- a/ee/session_recordings/queries/test/test_session_recording_list_from_filters.py
+++ /dev/null
@@ -1,353 +0,0 @@
-import re
-from itertools import product
-from uuid import uuid4
-
-from dateutil.relativedelta import relativedelta
-from django.utils.timezone import now
-from freezegun import freeze_time
-from parameterized import parameterized
-
-from ee.clickhouse.materialized_columns.columns import materialize
-from posthog.clickhouse.client import sync_execute
-from posthog.hogql.ast import CompareOperation, And, SelectQuery
-from posthog.hogql.base import Expr
-from posthog.hogql.context import HogQLContext
-from posthog.hogql.printer import print_ast
-from posthog.models import Person
-from posthog.models.filters import SessionRecordingsFilter
-from posthog.schema import PersonsOnEventsMode
-from posthog.session_recordings.queries.session_recording_list_from_filters import (
-    SessionRecordingListFromFilters,
-)
-from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
-from posthog.session_recordings.sql.session_replay_event_sql import TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL
-from posthog.test.base import (
-    APIBaseTest,
-    ClickhouseTestMixin,
-    QueryMatchingTest,
-    snapshot_clickhouse_queries,
-    _create_event,
-)
-
-
-# The HogQL pair of TestClickhouseSessionRecordingsListFromSessionReplay can be renamed when we delete the old one
-@freeze_time("2021-01-01T13:46:23")
-class TestClickhouseSessionRecordingsListFromFilters(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest):
-    def _print_query(self, query: SelectQuery) -> str:
-        return print_ast(
-            query,
-            HogQLContext(team_id=self.team.pk, enable_select_queries=True),
-            "clickhouse",
-            pretty=True,
-        )
-
-    def tearDown(self) -> None:
-        sync_execute(TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL())
-
-    @property
-    def base_time(self):
-        return (now() - relativedelta(hours=1)).replace(microsecond=0, second=0)
-
-    def create_event(
-        self,
-        distinct_id,
-        timestamp,
-        team=None,
-        event_name="$pageview",
-        properties=None,
-    ):
-        if team is None:
-            team = self.team
-        if properties is None:
-            properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"}
-        return _create_event(
-            team=team,
-            event=event_name,
-            timestamp=timestamp,
-            distinct_id=distinct_id,
-            properties=properties,
-        )
-
-    @parameterized.expand(
-        [
-            [
-                "test_poe_v1_still_falls_back_to_person_subquery",
-                True,
-                False,
-                False,
-                PersonsOnEventsMode.PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS,
-            ],
-            [
-                "test_poe_being_unavailable_we_fall_back_to_person_id_overrides",
-                False,
-                False,
-                False,
-                PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_JOINED,
-            ],
-            [
-                "test_poe_being_unavailable_we_fall_back_to_person_subquery_but_still_use_mat_props",
-                False,
-                False,
-                False,
-                PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_JOINED,
-            ],
-            [
-                "test_allow_denormalised_props_fix_does_not_stop_all_poe_processing",
-                False,
-                True,
-                False,
-                PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
-            ],
-            [
-                "test_poe_v2_available_person_properties_are_used_in_replay_listing",
-                False,
-                True,
-                True,
-                PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS,
-            ],
-        ]
-    )
-    def test_effect_of_poe_settings_on_query_generated(
-        self,
-        _name: str,
-        poe_v1: bool,
-        poe_v2: bool,
-        allow_denormalized_props: bool,
-        expected_poe_mode: PersonsOnEventsMode,
-    ) -> None:
-        with self.settings(
-            PERSON_ON_EVENTS_OVERRIDE=poe_v1,
-            PERSON_ON_EVENTS_V2_OVERRIDE=poe_v2,
-            ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalized_props,
-        ):
-            assert self.team.person_on_events_mode == expected_poe_mode
-            materialize("events", "rgInternal", table_column="person_properties")
-
-            filter = SessionRecordingsFilter(
-                team=self.team,
-                data={
-                    "properties": [
-                        {
-                            "key": "rgInternal",
-                            "value": ["false"],
-                            "operator": "exact",
-                            "type": "person",
-                        }
-                    ]
-                },
-            )
-            session_recording_list_instance = SessionRecordingListFromFilters(
-                filter=filter, team=self.team, hogql_query_modifiers=None
-            )
-
-            hogql_parsed_select = session_recording_list_instance.get_query()
-            printed_query = self._print_query(hogql_parsed_select)
-
-            person_filtering_expr = self._matching_person_filter_expr_from(hogql_parsed_select)
-
-            self._assert_is_events_person_filter(person_filtering_expr)
-
-            if poe_v1 or poe_v2:
-                # Property used directly from event (from materialized column)
-                assert "ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null')" in printed_query
-            else:
-                # We get the person property value from the persons JOIN
-                assert re.search(
-                    r"argMax\(replaceRegexpAll\(nullIf\(nullIf\(JSONExtractRaw\(person\.properties, %\(hogql_val_\d+\)s\), ''\), 'null'\), '^\"|\"\$', ''\), person\.version\) AS properties___rgInternal",
-                    printed_query,
-                )
-                # Then we actually filter on that property value
-                assert re.search(
-                    r"ifNull\(equals\(events__person\.properties___rgInternal, %\(hogql_val_\d+\)s\), 0\)",
-                    printed_query,
-                )
-            self.assertQueryMatchesSnapshot(printed_query)
-
-    def _assert_is_pdi_filter(self, person_filtering_expr: list[Expr]) -> None:
-        assert person_filtering_expr[0].right.select_from.table.chain == ["person_distinct_ids"]
-        assert person_filtering_expr[0].right.where.left.chain == ["person", "properties", "rgInternal"]
-
-    def _assert_is_events_person_filter(self, person_filtering_expr: list[Expr]) -> None:
-        assert person_filtering_expr[0].right.select_from.table.chain == ["events"]
-        event_person_condition = [
-            x
-            for x in person_filtering_expr[0].right.where.exprs
-            if isinstance(x, CompareOperation) and x.left.chain == ["person", "properties", "rgInternal"]
-        ]
-        assert len(event_person_condition) == 1
-
-    def _matching_person_filter_expr_from(self, hogql_parsed_select: SelectQuery) -> list[Expr]:
-        where_conditions: list[Expr] = hogql_parsed_select.where.exprs
-        ands = [x for x in where_conditions if isinstance(x, And)]
-        assert len(ands) == 1
-        and_comparisons = [x for x in ands[0].exprs if isinstance(x, CompareOperation)]
-        assert len(and_comparisons) == 1
-        assert isinstance(and_comparisons[0].right, SelectQuery)
-        return and_comparisons
-
-    settings_combinations = [
-        ["poe v2 and materialized columns allowed", False, True, True],
-        ["poe v2 and materialized columns off", False, True, False],
-        ["poe off and materialized columns allowed", False, False, True],
-        ["poe off and materialized columns not allowed", False, False, False],
-        ["poe v1 and materialized columns allowed", True, False, True],
-        ["poe v1 and not materialized columns not allowed", True, False, False],
-    ]
-
-    # Options for "materialize person columns"
-    materialization_options = [
-        [" with materialization", True],
-        [" without materialization", False],
-    ]
-
-    # Expand the parameter list to the product of all combinations with "materialize person columns"
-    # e.g. [a, b] x [c, d] = [a, c], [a, d], [b, c], [b, d]
-    test_case_combinations = [
-        [f"{name}{mat_option}", poe_v1, poe, mat_columns, mat_person]
-        for (name, poe_v1, poe, mat_columns), (mat_option, mat_person) in product(
-            settings_combinations, materialization_options
-        )
-    ]
-
-    @parameterized.expand(test_case_combinations)
-    @snapshot_clickhouse_queries
-    def test_event_filter_with_person_properties_materialized(
-        self,
-        _name: str,
-        poe1_enabled: bool,
-        poe2_enabled: bool,
-        allow_denormalised_props: bool,
-        materialize_person_props: bool,
-    ) -> None:
-        # KLUDGE: I couldn't figure out how to use @also_test_with_materialized_columns(person_properties=["email"])
-        # KLUDGE: and the parameterized.expand decorator at the same time, so we generate test case combos
-        # KLUDGE: for materialization on and off to test both sides the way the decorator would have
-        if materialize_person_props:
-            materialize("events", "email", table_column="person_properties")
-            materialize("person", "email")
-
-        with self.settings(
-            PERSON_ON_EVENTS_OVERRIDE=poe1_enabled,
-            PERSON_ON_EVENTS_V2_OVERRIDE=poe2_enabled,
-            ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalised_props,
-        ):
-            user_one = "test_event_filter_with_person_properties-user"
-            user_two = "test_event_filter_with_person_properties-user2"
-            session_id_one = f"test_event_filter_with_person_properties-1-{str(uuid4())}"
-            session_id_two = f"test_event_filter_with_person_properties-2-{str(uuid4())}"
-
-            Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"})
-            Person.objects.create(team=self.team, distinct_ids=[user_two], properties={"email": "bla2"})
-
-            self._add_replay_with_pageview(session_id_one, user_one)
-            produce_replay_summary(
-                distinct_id=user_one,
-                session_id=session_id_one,
-                first_timestamp=(self.base_time + relativedelta(seconds=30)),
-                team_id=self.team.id,
-            )
-            self._add_replay_with_pageview(session_id_two, user_two)
-            produce_replay_summary(
-                distinct_id=user_two,
-                session_id=session_id_two,
-                first_timestamp=(self.base_time + relativedelta(seconds=30)),
-                team_id=self.team.id,
-            )
-
-            match_everyone_filter = SessionRecordingsFilter(
-                team=self.team,
-                data={"properties": []},
-            )
-
-            session_recording_list_instance = SessionRecordingListFromFilters(
-                filter=match_everyone_filter, team=self.team, hogql_query_modifiers=None
-            )
-            (session_recordings, _, _) = session_recording_list_instance.run()
-
-            assert sorted([x["session_id"] for x in session_recordings]) == sorted([session_id_one, session_id_two])
-
-            match_bla_filter = SessionRecordingsFilter(
-                team=self.team,
-                data={
-                    "properties": [
-                        {
-                            "key": "email",
-                            "value": ["bla"],
-                            "operator": "exact",
-                            "type": "person",
-                        }
-                    ]
-                },
-            )
-
-            session_recording_list_instance = SessionRecordingListFromFilters(
-                filter=match_bla_filter, team=self.team, hogql_query_modifiers=None
-            )
-            (session_recordings, _, _) = session_recording_list_instance.run()
-
-            assert len(session_recordings) == 1
-            assert session_recordings[0]["session_id"] == session_id_one
-
-    def _add_replay_with_pageview(self, session_id: str, user: str) -> None:
-        self.create_event(
-            user,
-            self.base_time,
-            properties={"$session_id": session_id, "$window_id": str(uuid4())},
-        )
-        produce_replay_summary(
-            distinct_id=user,
-            session_id=session_id,
-            first_timestamp=self.base_time,
-            team_id=self.team.id,
-        )
-
-    @parameterized.expand(test_case_combinations)
-    @snapshot_clickhouse_queries
-    def test_person_id_filter(
-        self,
-        _name: str,
-        poe2_enabled: bool,
-        poe1_enabled: bool,
-        allow_denormalised_props: bool,
-        materialize_person_props: bool,
-    ) -> None:
-        # KLUDGE: I couldn't figure out how to use @also_test_with_materialized_columns(person_properties=["email"])
-        # KLUDGE: and the parameterized.expand decorator at the same time, so we generate test case combos
-        # KLUDGE: for materialization on and off to test both sides the way the decorator would have
-        if materialize_person_props:
-            # it shouldn't matter to this test whether any column is materialized
-            # but let's keep the tests in this file similar so we flush out any unexpected interactions
-            materialize("events", "email", table_column="person_properties")
-            materialize("person", "email")
-
-        with self.settings(
-            PERSON_ON_EVENTS_OVERRIDE=poe1_enabled,
-            PERSON_ON_EVENTS_V2_OVERRIDE=poe2_enabled,
-            ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalised_props,
-        ):
-            three_user_ids = ["person-1-distinct-1", "person-1-distinct-2", "person-2"]
-            session_id_one = f"test_person_id_filter-session-one"
-            session_id_two = f"test_person_id_filter-session-two"
-            session_id_three = f"test_person_id_filter-session-three"
-
-            p = Person.objects.create(
-                team=self.team,
-                distinct_ids=[three_user_ids[0], three_user_ids[1]],
-                properties={"email": "bla"},
-            )
-            Person.objects.create(
-                team=self.team,
-                distinct_ids=[three_user_ids[2]],
-                properties={"email": "bla2"},
-            )
-
-            self._add_replay_with_pageview(session_id_one, three_user_ids[0])
-            self._add_replay_with_pageview(session_id_two, three_user_ids[1])
-            self._add_replay_with_pageview(session_id_three, three_user_ids[2])
-
-            filter = SessionRecordingsFilter(team=self.team, data={"person_uuid": str(p.uuid)})
-            session_recording_list_instance = SessionRecordingListFromFilters(
-                filter=filter, team=self.team, hogql_query_modifiers=None
-            )
-            (session_recordings, _, _) = session_recording_list_instance.run()
-            assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_two, session_id_one])
diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py
index a3dc50c1228f5..7b1a962b187b7 100644
--- a/ee/session_recordings/session_recording_playlist.py
+++ b/ee/session_recordings/session_recording_playlist.py
@@ -1,4 +1,3 @@
-import json
 from typing import Any, Optional

 import structlog
@@ -13,7 +12,7 @@
 from posthog.api.forbid_destroy_model import ForbidDestroyModel
 from posthog.api.routing import TeamAndOrgViewSetMixin
 from posthog.api.shared import UserBasicSerializer
-from posthog.constants import SESSION_RECORDINGS_FILTER_IDS, AvailableFeature
+from posthog.constants import AvailableFeature
 from posthog.models import (
     SessionRecording,
     SessionRecordingPlaylist,
@@ -27,7 +26,6 @@
     changes_between,
     log_activity,
 )
-from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
 from posthog.models.team.team import check_is_feature_available_for_team
 from posthog.models.utils import UUIDT
 from posthog.rate_limit import (
@@ -37,7 +35,6 @@
 from posthog.schema import RecordingsQuery
 from posthog.session_recordings.session_recording_api import (
     list_recordings_response,
-    list_recordings,
     query_as_params_to_dict,
     list_recordings_from_query,
 )
@@ -230,19 +227,12 @@ def recordings(self, request: request.Request, *args: Any, **kwargs: Any) -> res
             .values_list("recording_id", flat=True)
         )

-        use_query_type = (request.GET.get("as_query", "False")).lower() == "true"
-
-        if use_query_type:
-            data_dict = query_as_params_to_dict(request.GET.dict())
-            query = RecordingsQuery.model_validate(data_dict)
-            query.session_ids = playlist_items
-            return list_recordings_response(
-                list_recordings_from_query(query, request, context=self.get_serializer_context())
-            )
-        else:
-            filter = SessionRecordingsFilter(request=request, team=self.team)
-            filter = filter.shallow_clone({SESSION_RECORDINGS_FILTER_IDS: json.dumps(playlist_items)})
-            return list_recordings_response(list_recordings(filter, request, context=self.get_serializer_context()))
+        data_dict = query_as_params_to_dict(request.GET.dict())
+        query = RecordingsQuery.model_validate(data_dict)
+        query.session_ids = playlist_items
+        return list_recordings_response(
+            list_recordings_from_query(query, request, context=self.get_serializer_context())
+        )

     # As of now, you can only "update" a session recording by adding or removing a recording from a static playlist
     @action(
diff --git a/ee/surveys/summaries/summarize_surveys.py b/ee/surveys/summaries/summarize_surveys.py
index 1e4b088484f55..1b74ca04d60c8 100644
--- a/ee/surveys/summaries/summarize_surveys.py
+++ b/ee/surveys/summaries/summarize_surveys.py
@@ -121,7 +121,7 @@ def summarize_survey_responses(
                 we're trying to identify what to work on
                 use as concise and simple language as is possible.
                 generate no text other than the summary.
-                the aim is to let people see themes in the responses received. return the text in github flavoured markdown format""",
+                the aim is to let people see themes in the responses received. return the text in markdown format without using any paragraph formatting""",
             },
         ],
         user=f"{instance_region}/{user.pk}",
@@ -131,5 +131,7 @@
     if usage:
         TOKENS_IN_PROMPT_HISTOGRAM.observe(usage)

+    logger.info("survey_summary_response", result=result)
+
     content: str = result.choices[0].message.content or ""
     return {"content": content, "timings": timer.get_all_timings()}
diff --git a/ee/urls.py b/ee/urls.py
index 7c722bc31852f..91b58e0fcb238 100644
--- a/ee/urls.py
+++ b/ee/urls.py
@@ -6,11 +6,11 @@
 from django.urls.conf import path

 from ee.api import integration

-from .api.rbac import organization_resource_access, role
 from .api import (
     authentication,
     billing,
+    conversation,
     dashboard_collaborator,
     explicit_team_member,
     feature_flag_role_access,
@@ -19,18 +19,20 @@
     sentry_stats,
     subscription,
 )
+from .api.rbac import organization_resource_access, role
 from .session_recordings import session_recording_playlist


 def extend_api_router() -> None:
     from posthog.api import (
-        router as root_router,
-        register_grandfathered_environment_nested_viewset,
-        projects_router,
-        organizations_router,
-        project_feature_flags_router,
         environment_dashboards_router,
+        environments_router,
         legacy_project_dashboards_router,
+        organizations_router,
+        project_feature_flags_router,
+        projects_router,
+        register_grandfathered_environment_nested_viewset,
+        router as root_router,
     )

     root_router.register(r"billing", billing.BillingViewset, "billing")
@@ -93,6 +95,10 @@ def extend_api_router() -> None:
         ["project_id"],
     )

+    environments_router.register(
+        r"conversations", conversation.ConversationViewSet, "environment_conversations", ["team_id"]
+    )
+

 # The admin interface is disabled on self-hosted instances, as its misuse can be unsafe
 admin_urlpatterns = (
diff --git a/frontend/__snapshots__/components-cards-text-card--template--dark.png b/frontend/__snapshots__/components-cards-text-card--template--dark.png
index 1418925cf2961..08005be3d11b8 100644
Binary files a/frontend/__snapshots__/components-cards-text-card--template--dark.png and b/frontend/__snapshots__/components-cards-text-card--template--dark.png differ diff --git a/frontend/__snapshots__/components-cards-text-card--template--light.png b/frontend/__snapshots__/components-cards-text-card--template--light.png index c0961d67af400..fdd4b64968096 100644 Binary files a/frontend/__snapshots__/components-cards-text-card--template--light.png and b/frontend/__snapshots__/components-cards-text-card--template--light.png differ diff --git a/frontend/__snapshots__/components-hogqleditor--hog-ql-editor--dark.png b/frontend/__snapshots__/components-hogqleditor--hog-ql-editor--dark.png index 2caeb97213e25..25a32339e8edb 100644 Binary files a/frontend/__snapshots__/components-hogqleditor--hog-ql-editor--dark.png and b/frontend/__snapshots__/components-hogqleditor--hog-ql-editor--dark.png differ diff --git a/frontend/__snapshots__/components-hogqleditor--hog-ql-editor--light.png b/frontend/__snapshots__/components-hogqleditor--hog-ql-editor--light.png index ac6439a3b0f62..513b3ed148bb7 100644 Binary files a/frontend/__snapshots__/components-hogqleditor--hog-ql-editor--light.png and b/frontend/__snapshots__/components-hogqleditor--hog-ql-editor--light.png differ diff --git a/frontend/__snapshots__/components-hogqleditor--no-value--dark.png b/frontend/__snapshots__/components-hogqleditor--no-value--dark.png index 1c538dbd29f44..5ceff04a1d206 100644 Binary files a/frontend/__snapshots__/components-hogqleditor--no-value--dark.png and b/frontend/__snapshots__/components-hogqleditor--no-value--dark.png differ diff --git a/frontend/__snapshots__/components-hogqleditor--no-value--light.png b/frontend/__snapshots__/components-hogqleditor--no-value--light.png index 401a4e98e08ce..2038e29ae4087 100644 Binary files a/frontend/__snapshots__/components-hogqleditor--no-value--light.png and b/frontend/__snapshots__/components-hogqleditor--no-value--light.png differ diff --git a/frontend/__snapshots__/components-hogqleditor--no-value-person-properties-disabled--dark.png b/frontend/__snapshots__/components-hogqleditor--no-value-person-properties-disabled--dark.png index 1c538dbd29f44..5ceff04a1d206 100644 Binary files a/frontend/__snapshots__/components-hogqleditor--no-value-person-properties-disabled--dark.png and b/frontend/__snapshots__/components-hogqleditor--no-value-person-properties-disabled--dark.png differ diff --git a/frontend/__snapshots__/components-hogqleditor--no-value-person-properties-disabled--light.png b/frontend/__snapshots__/components-hogqleditor--no-value-person-properties-disabled--light.png index 401a4e98e08ce..2038e29ae4087 100644 Binary files a/frontend/__snapshots__/components-hogqleditor--no-value-person-properties-disabled--light.png and b/frontend/__snapshots__/components-hogqleditor--no-value-person-properties-disabled--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--dark.png index e9fe7de459ce9..2ad22e1251aa4 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--light.png index b31b13bf96316..61c142b66d388 100644 Binary 
files a/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--all-slow--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--default--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--default--dark.png index 557a24ad829de..225413d72709d 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--default--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--default--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--default--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--default--light.png index 18eeafe53a294..0dddcd3fa4040 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--default--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--default--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--dark.png index 557a24ad829de..225413d72709d 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--light.png index 18eeafe53a294..0dddcd3fa4040 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--expanded--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--dark.png index 9c3fb1e344f74..acec91d9d5770 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--light.png index e41e7dec7df53..e35cf20e724cf 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-dom-interactive--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--dark.png index 2517ac22b8fc9..fac9168f5b922 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--light.png index 
4fce8a837f7a9..9786d21e2280d 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-fcp--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--dark.png index d9b70116e57b6..772ca47300a43 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--light.png index 88ad89c98e8d9..33839ce93e4d1 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--really-slow-load-event--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--dark.png index 0dac5dff8a31f..876339c60c48f 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--light.png index b6af1c8e969b6..09f1c1bbb0700 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-dom-interactive--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--dark.png index 7ca69b7425a30..85b075a0f290d 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--light.png index 4a9a95e7c36d7..4716324e22c09 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-fcp--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--dark.png index 657492c654a5d..f61d8a1aa9217 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--light.png 
b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--light.png index 7be9fca3cf6be..4125a9641deab 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--slow-load-event--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--dark.png index ab975529f0e67..197dba2e766b2 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--light.png index a4a1c5246525b..e3f9727ae5b77 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-fast--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--dark.png index 0cc51fee095f6..fe75b85b1853b 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--light.png index 312e4ac544ef9..53680276c8d44 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-medium--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--dark.png index 13921a926aa45..56ec6eb839a55 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--dark.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--light.png index bca3d330610fc..c415408379d3b 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-all-slow--light.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--dark.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--dark.png index 488140bfb1571..ae176c67d2aa2 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--dark.png and 
b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--dark.png differ diff --git a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--light.png b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--light.png index 5dea38e2b1c7f..7e9e414273a44 100644 Binary files a/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--light.png and b/frontend/__snapshots__/components-networkrequest-navigationitem--web-vitals-loading--light.png differ diff --git a/frontend/__snapshots__/components-playerinspector--default--dark.png b/frontend/__snapshots__/components-playerinspector--default--dark.png index 51580b9660e6f..87c483c27613a 100644 Binary files a/frontend/__snapshots__/components-playerinspector--default--dark.png and b/frontend/__snapshots__/components-playerinspector--default--dark.png differ diff --git a/frontend/__snapshots__/components-playerinspector--default--light.png b/frontend/__snapshots__/components-playerinspector--default--light.png index 42395f0be8cb4..04f141310bd43 100644 Binary files a/frontend/__snapshots__/components-playerinspector--default--light.png and b/frontend/__snapshots__/components-playerinspector--default--light.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--dark.png b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--dark.png index cb009d55395dd..b529d9ec4168f 100644 Binary files a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--dark.png and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--dark.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--light.png b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--light.png index 4c8ac367ea329..5598463df851b 100644 Binary files a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--light.png and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--custom-styles--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--custom-styles--dark.png index 692203c75b49e..2796435eb68ff 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--custom-styles--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--custom-styles--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--custom-styles--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--custom-styles--light.png index 63975d42c24c9..958d095dc7b38 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--custom-styles--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--custom-styles--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--default--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--default--dark.png index af5eb35b4a393..320f0dfb39709 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--default--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--default--dark.png differ diff --git 
a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--default--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--default--light.png index b7c759035f7be..1ec1d92d13bc5 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--default--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--default--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--friday-first--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--friday-first--dark.png index c70073759a9c3..e2a7d303c9bf3 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--friday-first--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--friday-first--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--friday-first--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--friday-first--light.png index fd30577a0f79f..c67e7eff5f2d6 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--friday-first--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--friday-first--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--hour--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--hour--dark.png index 97b6b6671f4b7..62005fd240d46 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--hour--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--hour--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--hour--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--hour--light.png index 46de463f355a3..deb1a4130559e 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--hour--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--hour--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--minute--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--minute--dark.png index 6e53b5cd15413..ccb0203a5d2c3 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--minute--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--minute--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--minute--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--minute--light.png index 6d29186846f99..c7001a1113b24 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--minute--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--minute--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--multiple-months--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--multiple-months--dark.png index f86bca1b53ab8..0d37818b855a9 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--multiple-months--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--multiple-months--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--multiple-months--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--multiple-months--light.png index 3015065991f8b..32569ecc291ad 100644 
Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--multiple-months--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--multiple-months--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--saturday-first--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--saturday-first--dark.png index d3cdc976bcd4d..0ca2d8c95c36b 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--saturday-first--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--saturday-first--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--saturday-first--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--saturday-first--light.png index 989bb7dc4e1f3..1382bcae5ed6b 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--saturday-first--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--saturday-first--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--sunday-first--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--sunday-first--dark.png index af5eb35b4a393..320f0dfb39709 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--sunday-first--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--sunday-first--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--sunday-first--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--sunday-first--light.png index b7c759035f7be..1ec1d92d13bc5 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--sunday-first--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--sunday-first--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--thursday-first--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--thursday-first--dark.png index f1cc69206e84b..d6279a75e1b90 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--thursday-first--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--thursday-first--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--thursday-first--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--thursday-first--light.png index 65cfd36b3081a..39a2c68e2cd5e 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--thursday-first--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--thursday-first--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--wednesday-first--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--wednesday-first--dark.png index 13aa1b9a431a2..bcc3113d51f69 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--wednesday-first--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--wednesday-first--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--wednesday-first--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--wednesday-first--light.png index a575e4ef6b228..161c7033dbea2 100644 Binary files 
a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--wednesday-first--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar--wednesday-first--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--default--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--default--dark.png index fc30b65924d01..841266fbca5bf 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--default--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--default--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--default--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--default--light.png index 3737e331119c9..514ed70766db7 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--default--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--default--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--hour--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--hour--dark.png index c07fa500efba1..022a1933f0729 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--hour--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--hour--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--hour--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--hour--light.png index dd6fa19630b65..8081ad0512c0a 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--hour--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--hour--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--minute--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--minute--dark.png index 09f57c49a2ddd..35a1560f55720 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--minute--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--minute--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--minute--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--minute--light.png index 7357eb0db768b..be88a38b80777 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--minute--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--minute--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--past--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--past--dark.png index 0376fb230f343..0728b9da4ad59 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--past--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--past--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--past--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--past--light.png index 655ce9d87c47c..eda77f53e84d4 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--past--light.png 
and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--past--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--upcoming--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--upcoming--dark.png index 6b80d80b1df93..bf29f763f7162 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--upcoming--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--upcoming--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--upcoming--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--upcoming--light.png index 8248c8be132db..331ac9df5287a 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--upcoming--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--upcoming--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle--dark.png index f595113e318ad..57fa737a559ed 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle--light.png index f8fbf21738bde..91bf217461bb6 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle--light.png and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle-and-multiple-months--dark.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle-and-multiple-months--dark.png new file mode 100644 index 0000000000000..5d0408386245a Binary files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle-and-multiple-months--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle-and-multiple-months--light.png b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle-and-multiple-months--light.png new file mode 100644 index 0000000000000..6386c0c6b6f99 Binary files /dev/null and b/frontend/__snapshots__/lemon-ui-lemon-calendar-lemon-calendar-select--with-time-toggle-and-multiple-months--light.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png index e17597e989d60..f5ed8a89ac925 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png and b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--dark.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png index afcf5cd9c035f..b2493c895f8bd 100644 Binary files a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png and 
b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000--light.png differ diff --git a/frontend/__snapshots__/posthog-3000-sidebar--dashboards--dark.png b/frontend/__snapshots__/posthog-3000-sidebar--dashboards--dark.png index d66b1f11c1a6b..758c48c0daa81 100644 Binary files a/frontend/__snapshots__/posthog-3000-sidebar--dashboards--dark.png and b/frontend/__snapshots__/posthog-3000-sidebar--dashboards--dark.png differ diff --git a/frontend/__snapshots__/posthog-3000-sidebar--dashboards--light.png b/frontend/__snapshots__/posthog-3000-sidebar--dashboards--light.png index eef82575bea8b..c05ba32634144 100644 Binary files a/frontend/__snapshots__/posthog-3000-sidebar--dashboards--light.png and b/frontend/__snapshots__/posthog-3000-sidebar--dashboards--light.png differ diff --git a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--dark.png b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--dark.png index 687b3e5258355..8c556eeb82689 100644 Binary files a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--dark.png and b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--dark.png differ diff --git a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--light.png b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--light.png index 01823e88fef50..6600ad600011f 100644 Binary files a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--light.png and b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags--light.png differ diff --git a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png index 7c84465a4fd8f..e05ec0ee257d3 100644 Binary files a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png and b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png differ diff --git a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png index 8008f3eb43eca..4d5f9882073b5 100644 Binary files a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png and b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png index c02be573f0f34..2392b90f55117 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png index ace7d3debd600..130aa9a287ffd 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png index 03870e07340f4..93ad7064fd68b 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png differ diff --git 
a/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png index 576caf4f434a2..537566ccb05ab 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png index 5db2152719be9..df722bd0f2d26 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png index 0aad92c68ce5f..477239b6b7587 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png b/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png index 899ff6dbf5378..7fbea863d2331 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png and b/frontend/__snapshots__/scenes-app-errortracking--list-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png b/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png index 0e1b38e9c70ad..fa92d131cd17f 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png and b/frontend/__snapshots__/scenes-app-errortracking--list-page--light.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png index 08312752ac112..835b68b2ea10d 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png index 8a55f274bf3f3..64f704c6a75bc 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png index 587abfccb4aa5..a1c8d5a8d59af 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png index ac19e318a3f94..695536b9bcac8 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--dark.png differ diff --git 
a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png index 8ad5ae4d07ef6..73e5f193806a7 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png index 64bcb81eb0d5d..6512dbd87e1db 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-historical-trends-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png index 25ab8cd06fd02..00916265cd38b 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png index d4389462f0b58..248d1f34b318a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--dark.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--dark.png index 53a38ebe97627..7f17c93cf8695 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--dark.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--light.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--light.png index b6956d05b33b5..149a6d0c31e64 100644 Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--light.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--long-loading--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png index ab8433877a9e5..64cc334785eda 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png index e5bac9eeee8f1..756eb74f6156b 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png and b/frontend/__snapshots__/scenes-app-max-ai--empty-thread-loading--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png index db43716077ebe..27244118ba4a0 
100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png index cc409784b1c45..6f13b687b5156 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png and b/frontend/__snapshots__/scenes-app-max-ai--generation-failure-thread--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png index 6f6b491670b99..78d4b44700226 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png index c456bd0c5fde0..d300a75be4dc6 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--dark.png b/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--dark.png index 803390e1b9822..f7e1627246573 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--light.png b/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--light.png index b785e83e2206f..1bcb94198a8b6 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--light.png and b/frontend/__snapshots__/scenes-app-max-ai--thread-with-rate-limit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png index be67ab75d125b..eee71a3783a06 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png index cf026b65c4994..358c01f1ccdae 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png and b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png index d606d44e5e440..1c682a169746a 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png index 19fd725a92aca..4a8389daacd3a 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png and 
b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png index 47129e328b11d..d39232bc20b73 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png index 30c69f8c33417..a128cf65fde2e 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png index 505db4406be40..3287c8e015277 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--light.png b/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--light.png index 93338096ad6c9..95511e628778d 100644 Binary files a/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--light.png and b/frontend/__snapshots__/scenes-other-onboarding--onboarding-billing--light.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png index b97b2329fdd96..9533045495757 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png index e797cc6467cfc..e6876dcaf9403 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-admin--light.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png index 84a4c839eff8d..c299d9caf9d5d 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png index 750ff00450e15..3a632563f3dd4 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-member--light.png differ diff --git 
a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png index b97b2329fdd96..9533045495757 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png index e797cc6467cfc..e6876dcaf9403 100644 Binary files a/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png and b/frontend/__snapshots__/scenes-other-org-member-invites--current-user-is-owner--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png index 27889851dde0e..32d397abf284c 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png b/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png index 53d8c00a4492e..7ae06bd540053 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-organization--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png index a3dab4071bd23..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png index 3a2fe7a7a7229..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png index dad63de9c216f..d2302cf2491a4 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png index 5dae24aa10f4a..ccab613598663 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png 
index a3dab4071bd23..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png index 3a2fe7a7a7229..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png index a3dab4071bd23..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png index 3a2fe7a7a7229..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png index a3dab4071bd23..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png index 3a2fe7a7a7229..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png index a3dab4071bd23..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png index 3a2fe7a7a7229..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png and 
b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png index a3dab4071bd23..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png index 3a2fe7a7a7229..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png index a3dab4071bd23..bcef1677c71eb 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png index 3a2fe7a7a7229..bf25bd234a23a 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png index 0c0af5f10249e..f0f85a24787f5 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png index 83d22ed903616..4cd020a3df66e 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png index 8168f2f6ab752..d16b8c42a3cd8 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png index cadfa35ff0a63..44413dfa61cd9 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png differ diff --git a/frontend/src/layout/GlobalModals.tsx 
b/frontend/src/layout/GlobalModals.tsx index 803bb2c5e8237..81bdae758064a 100644 --- a/frontend/src/layout/GlobalModals.tsx +++ b/frontend/src/layout/GlobalModals.tsx @@ -5,14 +5,12 @@ import { TimeSensitiveAuthenticationModal } from 'lib/components/TimeSensitiveAu import { UpgradeModal } from 'lib/components/UpgradeModal/UpgradeModal' import { TwoFactorSetupModal } from 'scenes/authentication/TwoFactorSetupModal' import { CreateOrganizationModal } from 'scenes/organization/CreateOrganizationModal' -import { membersLogic } from 'scenes/organization/membersLogic' import { CreateEnvironmentModal } from 'scenes/project/CreateEnvironmentModal' import { CreateProjectModal } from 'scenes/project/CreateProjectModal' import { SessionPlayerModal } from 'scenes/session-recordings/player/modal/SessionPlayerModal' import { inviteLogic } from 'scenes/settings/organization/inviteLogic' import { InviteModal } from 'scenes/settings/organization/InviteModal' import { PreviewingCustomCssModal } from 'scenes/themes/PreviewingCustomCssModal' -import { userLogic } from 'scenes/userLogic' import type { globalModalsLogicType } from './GlobalModalsType' @@ -58,7 +56,6 @@ export function GlobalModals(): JSX.Element { useActions(globalModalsLogic) const { isInviteModalShown } = useValues(inviteLogic) const { hideInviteModal } = useActions(inviteLogic) - const { user } = useValues(userLogic) return ( <> @@ -71,17 +68,7 @@ export function GlobalModals(): JSX.Element { - {user && user.organization?.enforce_2fa && !user.is_2fa_enabled && ( - <TwoFactorSetupModal - onSuccess={() => { - userLogic.actions.loadUser() - membersLogic.actions.loadAllMembers() - }} - forceOpen - closable={false} - required={true} - /> - )} + <TwoFactorSetupModal /> ) diff --git a/frontend/src/layout/navigation-3000/Navigation.scss b/frontend/src/layout/navigation-3000/Navigation.scss index df5f78ab272c6..42bb779a54d82 100644 --- a/frontend/src/layout/navigation-3000/Navigation.scss +++ b/frontend/src/layout/navigation-3000/Navigation.scss @@ -175,7 +175,7 @@ .Sidebar3000 { --sidebar-slider-padding: 0.125rem; --sidebar-horizontal-padding: 0.5rem; - --sidebar-row-height: 2.5rem; + --sidebar-row-height: 3rem; --sidebar-background: var(--bg-3000); position: relative; @@ -533,8 +533,6 @@ position: relative; display: flex; - flex-direction: column; - justify-content: center; width: 100%; height: 100%; color: inherit; @@ -549,7 +547,9 @@ } .SidebarListItem__link { + flex-direction: column; row-gap: 1px; + justify-content: center; padding: 0 var(--sidebar-horizontal-padding) 0 var(--sidebar-list-item-inset); color: inherit !important; // Disable link color .SidebarListItem[aria-disabled='true'] & { @@ -558,17 +558,33 @@ } .SidebarListItem__button { + flex-direction: row; + gap: 0.25rem; row-gap: 1px; + align-items: center; padding: 0 var(--sidebar-horizontal-padding) 0 var(--sidebar-list-item-inset); + font-size: 1.125rem; // Make icons bigger color: inherit !important; // Disable link color cursor: pointer; &:hover { background: var(--border-3000); } + + .SidebarListItem__icon { + flex-shrink: 0; + } + + .SidebarListItem__name { + overflow: hidden; + text-overflow: ellipsis; + } } .SidebarListItem__rename { + flex-direction: column; + justify-content: center; + // Pseudo-elements don't work on inputs, so we use a wrapper div background: var(--bg-light); diff --git a/frontend/src/layout/navigation-3000/components/SidebarList.tsx b/frontend/src/layout/navigation-3000/components/SidebarList.tsx index 2b63b9a61e9c6..65cd05d65c4d4 100644 --- a/frontend/src/layout/navigation-3000/components/SidebarList.tsx +++
b/frontend/src/layout/navigation-3000/components/SidebarList.tsx @@ -232,7 +232,8 @@ function SidebarListItem({ item, validateName, active, style }: SidebarListItemP if (isItemClickable(item)) { content = (
-                        {item.name}
+                        {item.icon && <span className="SidebarListItem__icon">{item.icon}</span>}
+                        <span className="SidebarListItem__name">{item.name}</span>
) } else if (!save || (!isItemTentative(item) && newName === null)) { diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx index 81b31df645706..244e42c52d936 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx @@ -174,7 +174,7 @@ export const sidePanelActivityLogic = kea([ ], })), - listeners(({ values, actions }) => ({ + listeners(({ values, actions, cache }) => ({ setActiveTab: ({ tab }) => { if (tab === SidePanelActivityTab.All && !values.allActivityResponseLoading) { actions.loadAllActivity() @@ -191,6 +191,13 @@ actions.setActiveTab(options as SidePanelActivityTab) } }, + togglePolling: ({ pageIsVisible }) => { + if (pageIsVisible) { + actions.loadImportantChanges() + } else { + clearTimeout(cache.pollTimeout) + } + }, })), selectors({ allActivity: [ diff --git a/frontend/src/layout/navigation-3000/types.ts b/frontend/src/layout/navigation-3000/types.ts index 3f79f6dbda42f..a941e7dfaad74 100644 --- a/frontend/src/layout/navigation-3000/types.ts +++ b/frontend/src/layout/navigation-3000/types.ts @@ -151,4 +151,5 @@ export interface TentativeListItem { export interface ButtonListItem extends BasicListItem { key: '__button__' onClick: () => void + icon?: JSX.Element }
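The togglePolling listener added to sidePanelActivityLogic above stops the important-changes poll while the tab is hidden and refetches the moment it becomes visible again. A minimal standalone sketch of the same pattern, outside kea (names such as loadImportantChanges and POLL_INTERVAL_MS are illustrative, not PostHog's actual API):

    // Poll only while the page is visible; pause when the tab is hidden.
    const POLL_INTERVAL_MS = 30_000 // assumed interval
    let pollTimeout: ReturnType<typeof setTimeout> | undefined

    async function loadImportantChanges(): Promise<void> {
        // ... fetch the activity feed here (placeholder) ...
        pollTimeout = setTimeout(loadImportantChanges, POLL_INTERVAL_MS) // schedule the next poll
    }

    document.addEventListener('visibilitychange', () => {
        if (document.visibilityState === 'visible') {
            void loadImportantChanges() // refresh immediately on return
        } else {
            clearTimeout(pollTimeout) // stop polling in the background
        }
    })

The design choice mirrors the diff: cancelling the pending timeout is enough to halt the loop, because each fetch schedules only its own successor.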
diff --git a/frontend/src/layout/navigation/ProjectNotice.tsx b/frontend/src/layout/navigation/ProjectNotice.tsx index 5a0a9c4dd28ce..8d4df0246a3a0 100644 --- a/frontend/src/layout/navigation/ProjectNotice.tsx +++ b/frontend/src/layout/navigation/ProjectNotice.tsx @@ -1,4 +1,5 @@ import { IconGear, IconPlus } from '@posthog/icons' +import { Spinner } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { dayjs } from 'lib/dayjs' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' @@ -22,17 +23,29 @@ interface ProjectNoticeBlueprint { closeable?: boolean } -function CountDown({ datetime }: { datetime: dayjs.Dayjs }): JSX.Element { +function CountDown({ datetime, callback }: { datetime: dayjs.Dayjs; callback?: () => void }): JSX.Element { const [now, setNow] = useState(dayjs()) + // Format the time difference as 00:00:00 + const duration = dayjs.duration(datetime.diff(now)) + const pastCountdown = duration.seconds() < 0 + + const countdown = pastCountdown + ? 'Expired' + : duration.hours() > 0 + ? duration.format('HH:mm:ss') + : duration.format('mm:ss') + useEffect(() => { const interval = setInterval(() => setNow(dayjs()), 1000) return () => clearInterval(interval) }, []) - // Format the time difference as 00:00:00 - const duration = dayjs.duration(datetime.diff(now)) - const countdown = duration.hours() > 0 ? duration.format('HH:mm:ss') : duration.format('mm:ss') + useEffect(() => { + if (pastCountdown) { + callback?.() + } + }, [pastCountdown]) return <>{countdown}</> } @@ -40,8 +53,8 @@ export function ProjectNotice(): JSX.Element | null { const { projectNoticeVariant } = useValues(navigationLogic) const { currentOrganization } = useValues(organizationLogic) - const { logout } = useActions(userLogic) - const { user } = useValues(userLogic) + const { logout, loadUser } = useActions(userLogic) + const { user, userLoading } = useValues(userLogic) const { closeProjectNotice } = useActions(navigationLogic) const { showInviteModal } = useActions(inviteLogic) const { requestVerificationLink } = useActions(verifyEmailLogic) @@ -124,7 +137,14 @@ export function ProjectNotice(): JSX.Element | null { You are currently logged in as a customer.{' '} {user?.is_impersonated_until && ( <> - Expires in <CountDown datetime={dayjs(user.is_impersonated_until)} /> + Expires in <CountDown datetime={dayjs(user.is_impersonated_until)} callback={loadUser} /> + {userLoading ? ( + <Spinner /> + ) : ( + <Link onClick={() => loadUser()}> + Refresh + </Link> + )} </> )}
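The reworked CountDown derives the label before the effects run, renders 'Expired' once the deadline passes, and fires the optional callback exactly once, when pastCountdown flips to true. The same idea as a plain TypeScript sketch with no React or dayjs dependency (formatRemaining and the one-second tick are illustrative):

    function formatRemaining(deadline: Date, now: Date): string {
        const ms = deadline.getTime() - now.getTime()
        if (ms < 0) {
            return 'Expired'
        }
        const total = Math.floor(ms / 1000)
        const h = Math.floor(total / 3600)
        const m = Math.floor((total % 3600) / 60)
        const s = total % 60
        const pad = (n: number): string => String(n).padStart(2, '0')
        // Match the component: HH:mm:ss while hours remain, mm:ss otherwise
        return h > 0 ? `${pad(h)}:${pad(m)}:${pad(s)}` : `${pad(m)}:${pad(s)}`
    }

    function startCountdown(deadline: Date, onExpired?: () => void): () => void {
        let expired = false
        const interval = setInterval(() => {
            const label = formatRemaining(deadline, new Date())
            if (label === 'Expired' && !expired) {
                expired = true // fire the callback only once, like the [pastCountdown] effect
                onExpired?.()
            }
            console.log(label) // stand-in for rendering
        }, 1000)
        return () => clearInterval(interval) // cleanup, as the effect's return value does
    }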
diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 81587232b3b07..f1497f937c334 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -54,6 +54,7 @@ import { ExternalDataSourceSyncSchema, ExternalDataSourceType, FeatureFlagAssociatedRoleType, + FeatureFlagStatusResponse, FeatureFlagType, Group, GroupListParams, @@ -424,8 +425,8 @@ class ApiRequest { return this.events(teamId).addPathComponent(id) } - public tags(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('tags') + public tags(projectId?: ProjectType['id']): ApiRequest { + return this.projectsDetail(projectId).addPathComponent('tags') } // # Data management @@ -520,8 +521,11 @@ class ApiRequest { return this.dashboards(teamId).addPathComponent(dashboardId) } - public dashboardCollaborators(dashboardId: DashboardType['id'], teamId?: TeamType['id']): ApiRequest { - return this.dashboardsDetail(dashboardId, teamId).addPathComponent('collaborators') + public dashboardCollaborators( + dashboardId: DashboardType['id'], + projectId: ProjectType['id'] = ApiConfig.getCurrentProjectId() // Collaborators endpoint is project-level, not team-level + ): ApiRequest { + return this.dashboardsDetail(dashboardId, projectId).addPathComponent('collaborators') } public dashboardSharing(dashboardId: DashboardType['id'], teamId?: TeamType['id']): ApiRequest { @@ -531,9 +535,9 @@ public dashboardCollaboratorsDetail( dashboardId: DashboardType['id'], userUuid: UserType['uuid'], - teamId?: TeamType['id'] + projectId?: ProjectType['id'] ): ApiRequest { - return this.dashboardCollaborators(dashboardId, teamId).addPathComponent(userUuid) + return this.dashboardCollaborators(dashboardId, projectId).addPathComponent(userUuid) } // # Dashboard templates @@ -663,6 +667,13 @@ class ApiRequest { ) } + public featureFlagStatus(teamId: TeamType['id'], featureFlagId: FeatureFlagType['id']): ApiRequest { + return this.projectsDetail(teamId) + .addPathComponent('feature_flags') + .addPathComponent(String(featureFlagId)) + .addPathComponent('status') + } + public featureFlagCreateScheduledChange(teamId: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('scheduled_changes') } @@ -834,9 +845,9 @@ return apiRequest } - // Chat - public chat(teamId?: TeamType['id']): ApiRequest { - return this.environmentsDetail(teamId).addPathComponent('query').addPathComponent('chat') + // Conversations + public conversations(teamId?: TeamType['id']): ApiRequest { return this.environmentsDetail(teamId).addPathComponent('conversations') } // Notebooks @@ -1042,6 +1053,12 @@ const api = { ): Promise<{ scheduled_change: ScheduledChangeType }> { return await new ApiRequest().featureFlagDeleteScheduledChange(teamId, scheduledChangeId).delete() }, + async getStatus( + teamId: TeamType['id'], + featureFlagId: FeatureFlagType['id'] + ): Promise<FeatureFlagStatusResponse> { + return await new ApiRequest().featureFlagStatus(teamId, featureFlagId).get() + }, }, organizationFeatureFlags: { @@ -1283,8 +1300,8 @@ const api = { }, tags: { - async list(teamId: TeamType['id'] = ApiConfig.getCurrentTeamId()): Promise { - return new ApiRequest().tags(teamId).get() + async list(projectId: TeamType['id'] = ApiConfig.getCurrentProjectId()): Promise { + return new ApiRequest().tags(projectId).get() }, }, @@ -2530,12 +2547,10 @@ const api = { }) }, - chatURL: (): string => { - return new ApiRequest().chat().assembleFullUrl() - }, - - async chat(data: any): Promise { - return await api.createResponse(this.chatURL(), data) + conversations: { + async create(data: { content: string; conversation?: string | null }): Promise { + return api.createResponse(new ApiRequest().conversations().assembleFullUrl(), data) + }, }, /** Fetch data from specified URL. The result already is JSON-parsed. */
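The chat helper is replaced by a conversations resource above: requests go to an environment-scoped /conversations path and carry { content, conversation }, where the optional conversation id is what lets a follow-up message continue an existing thread. A rough sketch of the resulting call shape (the URL template and raw fetch are assumptions for illustration, not the actual client):

    interface ConversationMessage {
        content: string
        conversation?: string | null // omit to start a new conversation
    }

    // Assumed URL shape, based on the ApiRequest builder above:
    // /api/environments/:environment_id/conversations
    async function createConversationMessage(environmentId: number, data: ConversationMessage): Promise<Response> {
        return fetch(`/api/environments/${environmentId}/conversations`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(data),
        })
    }

The first message creates the thread; passing the returned conversation id in subsequent calls appends to it.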
diff --git a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx index 34c889fd00cee..698530f338d4a 100644 --- a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx +++ b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx @@ -34,8 +34,8 @@ interface TextCardBodyProps extends Pick, ' export function TextContent({ text, closeDetails, className }: TextCardBodyProps): JSX.Element { return ( - closeDetails?.()}> - {text} +
    closeDetails?.()}> + {text}
    ) } @@ -143,7 +143,7 @@ export function TextCardInternal( )}
    - +
    {showResizeHandles && ( diff --git a/frontend/src/lib/components/HogQLEditor/HogQLEditor.tsx b/frontend/src/lib/components/HogQLEditor/HogQLEditor.tsx index 9d0cf1f42b893..95b809878a98d 100644 --- a/frontend/src/lib/components/HogQLEditor/HogQLEditor.tsx +++ b/frontend/src/lib/components/HogQLEditor/HogQLEditor.tsx @@ -58,7 +58,7 @@ export function HogQLEditor({ {placeholder ?? (metadataSource && isActorsQuery(metadataSource) ? "Enter HogQL expression, such as:\n- properties.$geoip_country_name\n- toInt(properties.$browser_version) * 10\n- concat(properties.name, ' <', properties.email, '>')\n- is_identified ? 'user' : 'anon'" - : "Enter HogQL Expression, such as:\n- properties.$current_url\n- person.properties.$geoip_country_name\n- toInt(properties.`Long Field Name`) * 10\n- concat(event, ' ', distinct_id)\n- if(1 < 2, 'small', 'large')")} + : "Enter HogQL Expression, such as:\n- properties.$current_url\n- person.properties.$geoip_country_name\n- pdi.person.properties.email\n- toInt(properties.`Long Field Name`) * 10\n- concat(event, ' ', distinct_id)\n- if(1 < 2, 'small', 'large')")}
    grouped by - { - updateQuerySource({ interval: value } as Partial) - }} - options={Object.entries(enabledIntervals).map(([value, { label, disabledReason, hidden }]) => ({ - value: value as IntervalType, - label, - hidden, - disabledReason, - }))} - /> + {isIntervalManuallySet ? ( + { + setIsIntervalManuallySet(false) + }} + tooltip="Unpin interval" + className="flex-1" + center + size="small" + icon={} + > + {interval || 'day'} + + ) : ( + { + updateQuerySource({ interval: value } as Partial) + }} + options={Object.entries(enabledIntervals).map(([value, { label, disabledReason, hidden }]) => ({ + value: value as IntervalType, + label, + hidden, + disabledReason, + }))} + /> + )} ) } diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx index 756844702c3e5..54389b51c0271 100644 --- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx @@ -166,7 +166,7 @@ function ValueDisplay({ } > : undefined} > @@ -222,8 +222,8 @@ export function PropertiesTable({ parent, }: PropertiesTableType): JSX.Element { const [searchTerm, setSearchTerm] = useState('') - const { hidePostHogPropertiesInTable } = useValues(userPreferencesLogic) - const { setHidePostHogPropertiesInTable } = useActions(userPreferencesLogic) + const { hidePostHogPropertiesInTable, hideNullValues } = useValues(userPreferencesLogic) + const { setHidePostHogPropertiesInTable, setHideNullValues } = useActions(userPreferencesLogic) const { isCloudOrDev } = useValues(preflightLogic) const objectProperties = useMemo(() => { @@ -283,11 +283,18 @@ export function PropertiesTable({ }) } - if (filterable && hidePostHogPropertiesInTable) { - entries = entries.filter(([key]) => { - const isPostHogProperty = key.startsWith('$') || PROPERTY_KEYS.includes(key) - const isNonDollarPostHogProperty = isCloudOrDev && CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS.includes(key) - return !isPostHogProperty && !isNonDollarPostHogProperty + if (filterable) { + entries = entries.filter(([key, value]) => { + if (hideNullValues && value === null) { + return false + } + if (hidePostHogPropertiesInTable) { + const isPostHogProperty = key.startsWith('$') || PROPERTY_KEYS.includes(key) + const isNonDollarPostHogProperty = + isCloudOrDev && CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS.includes(key) + return !isPostHogProperty && !isNonDollarPostHogProperty + } + return true }) } @@ -299,7 +306,7 @@ export function PropertiesTable({ }) } return entries - }, [properties, sortProperties, searchTerm, hidePostHogPropertiesInTable]) + }, [properties, sortProperties, searchTerm, hidePostHogPropertiesInTable, hideNullValues]) if (Array.isArray(properties)) { return ( @@ -424,7 +431,7 @@ export function PropertiesTable({ return ( <> {(searchable || filterable) && ( -
    +
{searchable && ( )} {filterable && ( - + <> + + + + )} @@ -467,6 +483,7 @@ onClick={() => { setSearchTerm('') setHidePostHogPropertiesInTable(false) + setHideNullValues(false) }} > Clear filters diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts index a9389640cbeb6..3e0f670074598 100644 --- a/frontend/src/lib/components/Support/supportLogic.ts +++ b/frontend/src/lib/components/Support/supportLogic.ts @@ -46,6 +46,29 @@ function getSessionReplayLink(): string { return `\nSession: ${replayUrl}` } +function getErrorTrackingLink(): string { + const filterGroup = encodeURIComponent( + JSON.stringify({ + type: 'AND', + values: [ + { + type: 'AND', + values: [ + { + key: '$session_id', + value: [posthog.get_session_id()], + operator: 'exact', + type: 'event', + }, + ], + }, + ], + }) + ) + + return `\nExceptions: https://us.posthog.com/project/2/error_tracking?filterGroup=${filterGroup}` +} + function getDjangoAdminLink( user: UserType | null, cloudRegion: Region | null | undefined, @@ -178,6 +201,11 @@ export const TARGET_AREA_TO_NAME = [ 'data-attr': `support-form-target-area-web_analytics`, label: 'Web Analytics', }, + { + value: 'error_tracking', + 'data-attr': `support-form-target-area-error_tracking`, + label: 'Error tracking', + }, ], }, ] @@ -211,6 +239,7 @@ export type SupportTicketTargetArea = | 'toolbar' | 'surveys' | 'web_analytics' + | 'error_tracking' export type SupportTicketSeverityLevel = keyof typeof SEVERITY_LEVEL_TO_NAME export type SupportTicketKind = keyof typeof SUPPORT_KIND_TO_SUBJECT @@ -446,6 +475,7 @@ export const supportLogic = kea([ `\nTarget area: ${target_area}` + `\nReport event: http://go/ticketByUUID/${zendesk_ticket_uuid}` + getSessionReplayLink() + + getErrorTrackingLink() + getCurrentLocationLink() + getDjangoAdminLink( userLogic.values.user,
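getErrorTrackingLink mirrors getSessionReplayLink: it pins the error-tracking view to the reporter's current session by serializing a filterGroup keyed on $session_id into the querystring. The URL construction, condensed into a standalone helper (sessionId and the host default are placeholders; the hardcoded us.posthog.com/project/2 host comes straight from the hunk above):

    function buildErrorTrackingUrl(sessionId: string, host = 'https://us.posthog.com/project/2'): string {
        const filterGroup = encodeURIComponent(
            JSON.stringify({
                type: 'AND',
                values: [
                    {
                        type: 'AND',
                        // exact-match the session id on event properties
                        values: [{ key: '$session_id', value: [sessionId], operator: 'exact', type: 'event' }],
                    },
                ],
            })
        )
        return `${host}/error_tracking?filterGroup=${filterGroup}`
    }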
const versionsList = Array.from({ length: options.versionCount }, (_, i) => ({ - version: `1.0.${i}`, + version: `${options.majorUsedVersion || 1}.${options.minorUsedVersion || 0}.${i}`, })).reverse() useMockedVersions( @@ -143,13 +149,14 @@ describe('versionCheckerLogic', () => { }, { usedVersions: [ - { version: '1.80.0', timestamp: '2023-01-01T12:00:00Z' }, - { version: '1.83.1-beta', timestamp: '2023-01-01T10:00:00Z' }, - { version: '1.84.0-delta', timestamp: '2023-01-01T08:00:00Z' }, + { version: '1.40.0', timestamp: '2023-01-01T12:00:00Z' }, + { version: '1.41.1-beta', timestamp: '2023-01-01T10:00:00Z' }, + { version: '1.42.0', timestamp: '2023-01-01T08:00:00Z' }, + { version: '1.42.0-delta', timestamp: '2023-01-01T08:00:00Z' }, ], expectation: { - latestUsedVersion: '1.84.0-delta', - numVersionsBehind: 1, + latestUsedVersion: '1.42.0', + numVersionsBehind: 42, latestAvailableVersion: '1.84.0', level: 'warning', }, diff --git a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts index 7ffecbbf89c82..4c6067adf4afc 100644 --- a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts +++ b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.ts @@ -174,6 +174,7 @@ export const versionCheckerLogic = kea([ if (!warning && sdkVersions && latestAvailableVersion) { const diff = diffVersions(latestAvailableVersion, latestUsedVersion) + if (diff && diff.diff > 0) { // there's a difference between the latest used version and the latest available version @@ -188,18 +189,14 @@ export const versionCheckerLogic = kea([ } let level: 'warning' | 'info' | 'error' | undefined - if (diff.kind === 'major' || numVersionsBehind >= 20) { + if (diff.kind === 'major') { + level = 'info' // it is desirable to be on the latest major version, but not critical + } else if (diff.kind === 'minor') { + level = numVersionsBehind >= 40 ? 'warning' : undefined + } + + if (level === undefined && numVersionsBehind >= 50) { level = 'error' - } else if (diff.kind === 'minor' && diff.diff >= 15) { - level = 'warning' - } else if ((diff.kind === 'minor' && diff.diff >= 10) || numVersionsBehind >= 10) { - level = 'info' - } else if (latestUsedVersion.extra) { - // if we have an extra (alpha/beta/rc/etc.) version, we should always show a warning if they aren't on the latest - level = 'warning' - } else { - // don't warn for a small number of patch versions behind - level = undefined } // we check if there is a "latest user version string" to avoid returning odd data in unexpected cases diff --git a/frontend/src/lib/components/ViewRecordingButton.tsx b/frontend/src/lib/components/ViewRecordingButton.tsx index 1d0c7adb1d4b0..37de0ba0eaae0 100644 --- a/frontend/src/lib/components/ViewRecordingButton.tsx +++ b/frontend/src/lib/components/ViewRecordingButton.tsx @@ -10,20 +10,27 @@ import { EventType } from '~/types' export default function ViewRecordingButton({ sessionId, timestamp, + inModal = false, ...props }: Pick & { sessionId: string timestamp?: string | Dayjs + // whether to open in a modal or navigate to the replay page + inModal?: boolean }): JSX.Element { const { openSessionPlayer } = useActions(sessionPlayerModalLogic) return ( { - const fiveSecondsBeforeEvent = dayjs(timestamp).valueOf() - 5000 - openSessionPlayer({ id: sessionId }, Math.max(fiveSecondsBeforeEvent, 0)) - }} + to={inModal ? undefined : urls.replaySingle(sessionId)} + onClick={ + inModal + ? () => { + const fiveSecondsBeforeEvent = timestamp ? 
dayjs(timestamp).valueOf() - 5000 : 0 + openSessionPlayer({ id: sessionId }, Math.max(fiveSecondsBeforeEvent, 0)) + } + : undefined + } sideIcon={} {...props} > diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 6becb7998a814..a06134986b9b1 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -165,6 +165,7 @@ export const FEATURE_FLAGS = { PERSON_FEED_CANVAS: 'person-feed-canvas', // owner: #project-canvas FEATURE_FLAG_COHORT_CREATION: 'feature-flag-cohort-creation', // owner: @neilkakkar #team-feature-success INSIGHT_HORIZONTAL_CONTROLS: 'insight-horizontal-controls', // owner: @benjackwhite + SURVEYS_ADAPTIVE_LIMITS: 'surveys-adaptive-limits', // owner: #team-feature-success SURVEYS_WIDGETS: 'surveys-widgets', // owner: #team-feature-success SURVEYS_EVENTS: 'surveys-events', // owner: #team-feature-success SURVEYS_ACTIONS: 'surveys-actions', // owner: #team-feature-success @@ -197,19 +198,14 @@ export const FEATURE_FLAGS = { SETTINGS_BOUNCE_RATE_PAGE_VIEW_MODE: 'settings-bounce-rate-page-view-mode', // owner: @robbie-c ONBOARDING_DASHBOARD_TEMPLATES: 'onboarding-dashboard-templates', // owner: @raquelmsmith MULTIPLE_BREAKDOWNS: 'multiple-breakdowns', // owner: @skoob13 #team-product-analytics - WEB_ANALYTICS_LIVE_USER_COUNT: 'web-analytics-live-user-count', // owner: @robbie-c SETTINGS_SESSION_TABLE_VERSION: 'settings-session-table-version', // owner: @robbie-c INSIGHT_FUNNELS_USE_UDF: 'insight-funnels-use-udf', // owner: @aspicer #team-product-analytics INSIGHT_FUNNELS_USE_UDF_TRENDS: 'insight-funnels-use-udf-trends', // owner: @aspicer #team-product-analytics FIRST_TIME_FOR_USER_MATH: 'first-time-for-user-math', // owner: @skoob13 #team-product-analytics MULTITAB_EDITOR: 'multitab-editor', // owner: @EDsCODE #team-data-warehouse - WEB_ANALYTICS_REPLAY: 'web-analytics-replay', // owner: @robbie-c BATCH_EXPORTS_POSTHOG_HTTP: 'posthog-http-batch-exports', EXPERIMENT_MAKE_DECISION: 'experiment-make-decision', // owner: @jurajmajerik #team-feature-success DATA_MODELING: 'data-modeling', // owner: @EDsCODE #team-data-warehouse - WEB_ANALYTICS_CONVERSION_GOALS: 'web-analytics-conversion-goals', // owner: @robbie-c - WEB_ANALYTICS_LAST_CLICK: 'web-analytics-last-click', // owner: @robbie-c - WEB_ANALYTICS_LCP_SCORE: 'web-analytics-lcp-score', // owner: @robbie-c HEDGEHOG_SKIN_SPIDERHOG: 'hedgehog-skin-spiderhog', // owner: @benjackwhite INSIGHT_VARIABLES: 'insight_variables', // owner: @Gilbert09 #team-data-warehouse WEB_EXPERIMENTS: 'web-experiments', // owner: @team-feature-success @@ -221,24 +217,24 @@ export const FEATURE_FLAGS = { EXPERIMENTS_HOGQL: 'experiments-hogql', // owner: @jurajmajerik #team-experiments ROLE_BASED_ACCESS_CONTROL: 'role-based-access-control', // owner: @zach MESSAGING: 'messaging', // owner @mariusandra #team-cdp - SESSION_REPLAY_URL_BLOCKLIST: 'session-replay-url-blocklist', // owner: @richard-better #team-replay BILLING_TRIAL_FLOW: 'billing-trial-flow', // owner: @zach EDIT_DWH_SOURCE_CONFIG: 'edit_dwh_source_config', // owner: @Gilbert09 #team-data-warehouse AI_SURVEY_RESPONSE_SUMMARY: 'ai-survey-response-summary', // owner: @pauldambra - CUSTOM_CHANNEL_TYPE_RULES: 'custom-channel-type-rules', // owner: @robbie-c #team-web-analytics SELF_SERVE_CREDIT_OVERRIDE: 'self-serve-credit-override', // owner: @zach FEATURE_MANAGEMENT_UI: 'feature-management-ui', // owner: @haven #team-feature-flags CUSTOM_CSS_THEMES: 'custom-css-themes', // owner: @daibhin METALYTICS: 'metalytics', // owner: @surbhi 
EXPERIMENTS_MULTIPLE_METRICS: 'experiments-multiple-metrics', // owner: @jurajmajerik #team-experiments - WEB_ANALYTICS_WARN_CUSTOM_EVENT_NO_SESSION: 'web-analytics-warn-custom-event-no-session', // owner: @robbie-c #team-web-analytics REMOTE_CONFIG: 'remote-config', // owner: @benjackwhite SITE_DESTINATIONS: 'site-destinations', // owner: @mariusandra #team-cdp SITE_APP_FUNCTIONS: 'site-app-functions', // owner: @mariusandra #team-cdp + HOG_TRANSFORMATIONS: 'hog-transformations', // owner: #team-cdp REPLAY_HOGQL_FILTERS: 'replay-hogql-filters', // owner: @pauldambra #team-replay REPLAY_LIST_RECORDINGS_AS_QUERY: 'replay-list-recordings-as-query', // owner: @pauldambra #team-replay BILLING_SKIP_FORECASTING: 'billing-skip-forecasting', // owner: @zach + EXPERIMENT_STATS_V2: 'experiment-stats-v2', // owner: @danielbachhuber #team-experiments WEB_ANALYTICS_PERIOD_COMPARISON: 'web-analytics-period-comparison', // owner: @rafaeelaudibert #team-web-analytics + WEB_ANALYTICS_CONVERSION_GOAL_FILTERS: 'web-analytics-conversion-goal-filters', // owner: @rafaeelaudibert #team-web-analytics } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/integrations/IntegrationScopesWarning.tsx b/frontend/src/lib/integrations/IntegrationScopesWarning.tsx new file mode 100644 index 0000000000000..c9e6c7a61d764 --- /dev/null +++ b/frontend/src/lib/integrations/IntegrationScopesWarning.tsx @@ -0,0 +1,65 @@ +import api from 'lib/api' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { Link } from 'lib/lemon-ui/Link' +import { useMemo } from 'react' + +import { HogFunctionInputSchemaType, IntegrationType } from '~/types' + +export function IntegrationScopesWarning({ + integration, + schema, +}: { + integration: IntegrationType + schema?: HogFunctionInputSchemaType +}): JSX.Element { + const getScopes = useMemo((): string[] => { + const scopes: any[] = [] + const possibleScopeLocation = [integration.config.scope, integration.config.scopes] + + possibleScopeLocation.map((scope) => { + if (typeof scope === 'string') { + scopes.push(scope.split(' ')) + scopes.push(scope.split(',')) + } + if (typeof scope === 'object') { + scopes.push(scope) + } + }) + return scopes + .filter((scope: any) => typeof scope === 'object') + .reduce((a, b) => (a.length > b.length ? a : b), []) + }, [integration.config]) + + const requiredScopes = schema?.requiredScopes?.split(' ') || [] + const missingScopes = requiredScopes.filter((scope: string) => !getScopes.includes(scope)) + + if (missingScopes.length === 0 || getScopes.length === 0) { + return <> + } + return ( +
    + + Required scopes are missing: [{missingScopes.join(', ')}]. + {integration.kind === 'hubspot' ? ( + + Note that some features may not be available on your current HubSpot plan. Check out{' '} + + this page + {' '} + for more details. + + ) : null} + +
    + ) +} diff --git a/frontend/src/lib/integrations/IntegrationView.tsx b/frontend/src/lib/integrations/IntegrationView.tsx index 31cd12e82eb40..80590299bda4d 100644 --- a/frontend/src/lib/integrations/IntegrationView.tsx +++ b/frontend/src/lib/integrations/IntegrationView.tsx @@ -1,15 +1,18 @@ import { LemonBanner } from '@posthog/lemon-ui' import api from 'lib/api' import { UserActivityIndicator } from 'lib/components/UserActivityIndicator/UserActivityIndicator' +import { IntegrationScopesWarning } from 'lib/integrations/IntegrationScopesWarning' -import { IntegrationType } from '~/types' +import { HogFunctionInputSchemaType, IntegrationType } from '~/types' export function IntegrationView({ integration, suffix, + schema, }: { integration: IntegrationType suffix?: JSX.Element + schema?: HogFunctionInputSchemaType }): JSX.Element { const errors = (integration.errors && integration.errors?.split(',')) || [] @@ -36,7 +39,7 @@ export function IntegrationView({ {suffix}
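            {/* Connection errors take precedence; otherwise the else-branch added below presumably renders the new IntegrationScopesWarning imported above. */}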
    - {errors.length > 0 && ( + {errors.length > 0 ? (
    + ) : ( + )}
    )
}
diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss
index b97f0e30bcc3f..cf41641c4c8ea 100644
--- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss
+++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.scss
@@ -1,4 +1,5 @@
 .LemonCalendar {
+    --lemon-calendar-month-height: 305px;
     --lemon-calendar-row-gap: 2px;
     --lemon-calendar-day-width: 40px;
     --lemon-calendar-today-radius: 2px;
@@ -7,6 +8,9 @@
     // Tricky: needs to match the equivalent height button from LemonButton.scss
     --lemon-calendar-time-button-height: 2.3125rem;

+    // Force height for month so when switching to longer months, the height doesn't change
+    height: var(--lemon-calendar-month-height);
+
     .LemonCalendar__month {
         width: 100%;
     }
@@ -23,6 +27,10 @@
     }

     .LemonCalendar__month tr {
+        &.LemonCalendar__month-header {
+            height: var(--lemon-calendar-time-button-height);
+        }
+
         .LemonButton {
             &.rounded-none {
                 border-radius: 0;
diff --git a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx
index f955b57a5de06..632cc6c2af229 100644
--- a/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx
+++ b/frontend/src/lib/lemon-ui/LemonCalendar/LemonCalendar.tsx
@@ -81,7 +81,7 @@ export const LemonCalendar = forwardRef(function LemonCalendar(
     return (
-
+
    {showLeftMonth && ( ([ path(['lib', 'logic', 'userPreferencesLogic']), actions({ setHidePostHogPropertiesInTable: (enabled: boolean) => ({ enabled }), + setHideNullValues: (enabled: boolean) => ({ enabled }), }), reducers(() => ({ hidePostHogPropertiesInTable: [ @@ -16,5 +17,6 @@ export const userPreferencesLogic = kea([ setHidePostHogPropertiesInTable: (_, { enabled }) => enabled, }, ], + hideNullValues: [true, { persist: true }, { setHideNullValues: (_, { enabled }) => enabled }], })), ]) diff --git a/frontend/src/lib/monaco/CodeEditor.tsx b/frontend/src/lib/monaco/CodeEditor.tsx index f824b982d7f63..10dabd94c1bb4 100644 --- a/frontend/src/lib/monaco/CodeEditor.tsx +++ b/frontend/src/lib/monaco/CodeEditor.tsx @@ -16,7 +16,7 @@ import * as monaco from 'monaco-editor' import { useEffect, useMemo, useRef, useState } from 'react' import { themeLogic } from '~/layout/navigation-3000/themeLogic' -import { AnyDataNode, HogLanguage } from '~/queries/schema' +import { AnyDataNode, HogLanguage, HogQLMetadataResponse } from '~/queries/schema' if (loader) { loader.config({ monaco }) @@ -32,7 +32,7 @@ export interface CodeEditorProps extends Omit sourceQuery?: AnyDataNode globals?: Record schema?: Record | null - + onMetadata?: (metadata: HogQLMetadataResponse) => void onError?: (error: string | null, isValidView: boolean) => void } let codeEditorIndex = 0 @@ -121,6 +121,7 @@ export function CodeEditor({ sourceQuery, schema, onError, + onMetadata, ...editorProps }: CodeEditorProps): JSX.Element { const { isDarkModeOn } = useValues(themeLogic) @@ -140,6 +141,7 @@ export function CodeEditor({ monaco: monaco, editor: editor, onError, + onMetadata, }) useMountedLogic(builtCodeEditorLogic) diff --git a/frontend/src/lib/monaco/codeEditorLogic.tsx b/frontend/src/lib/monaco/codeEditorLogic.tsx index 63290fa0012b7..42e95f25209a4 100644 --- a/frontend/src/lib/monaco/codeEditorLogic.tsx +++ b/frontend/src/lib/monaco/codeEditorLogic.tsx @@ -50,6 +50,7 @@ export interface CodeEditorLogicProps { globals?: Record multitab?: boolean onError?: (error: string | null, isValidView: boolean) => void + onMetadata?: (metadata: HogQLMetadataResponse) => void } export const codeEditorLogic = kea([ @@ -100,6 +101,7 @@ export const codeEditorLogic = kea([ variables, }) breakpoint() + props.onMetadata?.(response) return [query, response] }, }, diff --git a/frontend/src/loadPostHogJS.tsx b/frontend/src/loadPostHogJS.tsx index badabf1105246..4dfc4e30ee47f 100644 --- a/frontend/src/loadPostHogJS.tsx +++ b/frontend/src/loadPostHogJS.tsx @@ -67,6 +67,7 @@ export function loadPostHogJS(): void { capture_copied_text: true, }, person_profiles: 'always', + __preview_remote_config: true, // Helper to capture events for assertions in Cypress _onCapture: (window as any)._cypress_posthog_captures diff --git a/frontend/src/mocks/handlers.ts b/frontend/src/mocks/handlers.ts index 9d14c1b3c0acf..de9e072e7f69b 100644 --- a/frontend/src/mocks/handlers.ts +++ b/frontend/src/mocks/handlers.ts @@ -119,6 +119,7 @@ export const defaultMocks: Mocks = { }, }, ], + '/api/users/@me/two_factor_status/': () => [200, { is_enabled: true, backup_codes: [], method: 'TOTP' }], '/api/environments/@current/': MOCK_DEFAULT_TEAM, '/api/projects/@current/': MOCK_DEFAULT_TEAM, '/api/projects/:team_id/comments/count': { count: 0 }, diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index ba1b6d9478680..f1bbaa4516c55 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ 
b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -191,6 +191,10 @@ export const dataNodeLogic = kea([ if (cache.localResults[stringifiedQuery] && !refresh) { return cache.localResults[stringifiedQuery] } + + if (!query.query) { + return null + } } if (!values.currentTeamId) { @@ -337,6 +341,12 @@ export const dataNodeLogic = kea([ ], })), reducers(({ props }) => ({ + isRefresh: [ + false, + { + loadData: (_, { refresh }) => !!refresh, + }, + ], dataLoading: [ false, { @@ -474,8 +484,12 @@ export const dataNodeLogic = kea([ (variablesOverride) => !!variablesOverride, ], isShowingCachedResults: [ - () => [(_, props) => props.cachedResults ?? null, (_, props) => props.query], - (cachedResults: AnyResponseType | null, query: DataNode): boolean => { + (s) => [(_, props) => props.cachedResults ?? null, (_, props) => props.query, s.isRefresh], + (cachedResults: AnyResponseType | null, query: DataNode, isRefresh): boolean => { + if (isRefresh) { + return false + } + return ( !!cachedResults || (cache.localResults && 'query' in query && JSON.stringify(query.query) in cache.localResults) @@ -630,23 +644,26 @@ export const dataNodeLogic = kea([ (s) => [s.nextAllowedRefresh, s.lastRefresh], (nextAllowedRefresh: string | null, lastRefresh: string | null) => (): string => { const now = dayjs() - let disabledReason = '' - if (!!nextAllowedRefresh && now.isBefore(dayjs(nextAllowedRefresh))) { - // If this is a saved insight, the result will contain nextAllowedRefresh, and we use that to disable the button - disabledReason = `You can refresh this insight again ${dayjs(nextAllowedRefresh).from(now)}` - } else if ( - !!lastRefresh && - now.subtract(UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES - 0.5, 'minutes').isBefore(lastRefresh) - ) { - // Unsaved insights don't get cached and get refreshed on every page load, but we avoid allowing users to click - // 'refresh' more than once every UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES. 
This can be bypassed by simply
-                // refreshing the page though, as there's no cache layer on the backend
-                disabledReason = `You can refresh this insight again ${dayjs(lastRefresh)
-                    .add(UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES, 'minutes')
-                    .from(now)}`
+                // Saved insights have a nextAllowedRefresh we use to check if the user can refresh again
+                if (nextAllowedRefresh) {
+                    const nextRefreshTime = dayjs(nextAllowedRefresh)
+                    if (now.isBefore(nextRefreshTime)) {
+                        return `You can refresh this insight again ${nextRefreshTime.from(now)}`
+                    }
                }
-
-                return disabledReason
+                // For unsaved insights we check the last refresh time
+                if (lastRefresh) {
+                    const earliestRefresh = dayjs(lastRefresh).add(
+                        UNSAVED_INSIGHT_MIN_REFRESH_INTERVAL_MINUTES,
+                        'minutes'
+                    )
+                    if (now.isBefore(earliestRefresh)) {
+                        return `You can refresh this insight again ${earliestRefresh.from(now)}`
+                    }
+                }
+                // If we don't have a nextAllowedRefresh or lastRefresh, we can refresh, so we
+                // return an empty string
+                return ''
            },
        ],
        timings: [
diff --git a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx
index 5a2830a135ef2..5ac043b46654f 100644
--- a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx
+++ b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx
@@ -55,6 +55,7 @@ export function EventRowActions({ event }: EventActionProps): JSX.Element {
                )}
            props.setQuery?.({ ...props.query, source }),
-        [props.setQuery]
+        [props.setQuery, props.query]
    )

    let component: JSX.Element | null = null
diff --git a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts
index fa9037cdcada9..4480fe9977755 100644
--- a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts
+++ b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts
@@ -1,7 +1,7 @@
-import { actions, afterMount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea'
+import { actions, afterMount, connect, kea, key, listeners, path, props, propsChanged, reducers, selectors } from 'kea'
 import { subscriptions } from 'kea-subscriptions'
 import { dayjs } from 'lib/dayjs'
-import { lightenDarkenColor, RGBToHex, uuid } from 'lib/utils'
+import { lightenDarkenColor, objectsEqual, RGBToHex, uuid } from 'lib/utils'
 import mergeObject from 'lodash.merge'
 import { teamLogic } from 'scenes/teamLogic'

@@ -242,6 +242,11 @@ export const dataVisualizationLogic = kea([
            ['loadData'],
        ],
    })),
+    propsChanged(({ actions, props }, oldProps) => {
+        if (props.query && !objectsEqual(props.query, oldProps.query)) {
+            actions._setQuery(props.query)
+        }
+    }),
    props({ query: { source: {} } } as DataVisualizationLogicProps),
    actions(({ values }) => ({
        setVisualizationType: (visualizationType: ChartDisplayType) => ({ visualizationType }),
@@ -280,12 +285,14 @@ export const dataVisualizationLogic = kea([
            colorMode: values.isDarkModeOn ? 
'dark' : 'light', }), setConditionalFormattingRulesPanelActiveKeys: (keys: string[]) => ({ keys }), + _setQuery: (node: DataVisualizationNode) => ({ node }), })), reducers(({ props }) => ({ query: [ props.query, { setQuery: (_, { node }) => node, + _setQuery: (_, { node }) => node, }, ], visualizationType: [ diff --git a/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx b/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx index d8364d21c2508..d532805db83a5 100644 --- a/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx +++ b/frontend/src/queries/nodes/EventsNode/EventPropertyFilters.tsx @@ -1,7 +1,9 @@ +import { useValues } from 'kea' import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { useState } from 'react' +import { groupsModel } from '~/models/groupsModel' import { EventsNode, EventsQuery, HogQLQuery, SessionAttributionExplorerQuery } from '~/queries/schema' import { isHogQLQuery, isSessionAttributionExplorerQuery } from '~/queries/utils' import { AnyPropertyFilter } from '~/types' @@ -21,6 +23,7 @@ export function EventPropertyFilters< isHogQLQuery(query) || isSessionAttributionExplorerQuery(query) ? query.filters?.properties : query.properties const eventNames = isHogQLQuery(query) || isSessionAttributionExplorerQuery(query) ? [] : query.event ? [query.event] : [] + const { groupsTaxonomicTypes } = useValues(groupsModel) return !properties || Array.isArray(properties) ? ( - loadData(true)}> - Refresh - + + loadData(true)} + className={disabledReason ? 'opacity-50' : ''} + disabledReason={canBypassRefreshDisabled ? '' : disabledReason} + > + Refresh + + )} diff --git a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx index 58bcbf6154cde..90f036c734bc4 100644 --- a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx +++ b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx @@ -77,7 +77,7 @@ export function EditorFilters({ query, showing, embedded }: EditorFiltersProps): isStepsFunnel || isTrendsFunnel const hasPathsAdvanced = hasAvailableFeature(AvailableFeature.PATHS_ADVANCED) - const hasAttribution = isStepsFunnel + const hasAttribution = isStepsFunnel || isTrendsFunnel const hasPathsHogQL = isPaths && pathsFilter?.includeEventTypes?.includes(PathType.HogQL) const editorFilters: InsightEditorFilterGroup[] = [ diff --git a/frontend/src/queries/nodes/WebOverview/WebOverview.tsx b/frontend/src/queries/nodes/WebOverview/WebOverview.tsx index 925087cd8f9dc..83af7956c55da 100644 --- a/frontend/src/queries/nodes/WebOverview/WebOverview.tsx +++ b/frontend/src/queries/nodes/WebOverview/WebOverview.tsx @@ -2,11 +2,9 @@ import { IconTrending } from '@posthog/icons' import { LemonSkeleton } from '@posthog/lemon-ui' import { useValues } from 'kea' import { getColorVar } from 'lib/colors' -import { FEATURE_FLAGS } from 'lib/constants' import { IconTrendingDown, IconTrendingFlat } from 'lib/lemon-ui/icons' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { humanFriendlyDuration, humanFriendlyLargeNumber, isNotNil, range } from 'lib/utils' import { useState } from 'react' @@ -42,14 +40,13 @@ export function WebOverview(props: { onData, dataNodeCollectionId: dataNodeCollectionId ?? 
key, }) - const { featureFlags } = useValues(featureFlagLogic) const { response, responseLoading } = useValues(logic) const webOverviewQueryResponse = response as WebOverviewQueryResponse | undefined const samplingRate = webOverviewQueryResponse?.samplingRate - const numSkeletons = props.query.conversionGoal ? 4 : featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_LCP_SCORE] ? 6 : 5 + const numSkeletons = props.query.conversionGoal ? 4 : 6 return ( <> diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index b4d28dbbf3776..b81e4669af38a 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -546,22 +546,6 @@ }, "type": "object" }, - "AssistantCompareFilter": { - "additionalProperties": false, - "properties": { - "compare": { - "default": false, - "description": "Whether to compare the current date range to a previous date range.", - "type": "boolean" - }, - "compare_to": { - "default": "-7d", - "description": "The date range to compare to. The value is a relative date. Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-100w` for 100 weeks ago, `-14d` for 14 days ago, `-30h` for 30 hours ago.", - "type": "string" - } - }, - "type": "object" - }, "AssistantDateTimePropertyFilter": { "additionalProperties": false, "properties": { @@ -585,7 +569,7 @@ "type": "string" }, "AssistantEventType": { - "enum": ["status", "message"], + "enum": ["status", "message", "conversation"], "type": "string" }, "AssistantFunnelsBreakdownFilter": { @@ -742,7 +726,7 @@ "description": "Breakdown the chart by a property" }, "dateRange": { - "$ref": "#/definitions/AssistantInsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -1043,27 +1027,11 @@ } ] }, - "AssistantInsightDateRange": { - "additionalProperties": false, - "properties": { - "date_from": { - "default": "-7d", - "description": "Start date. The value can be:\n- a relative date. Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-1w` for 1 week ago, `-14d` for 14 days ago, `-30h` for 30 hours ago.\n- an absolute ISO 8601 date string. a constant `yStart` for the current year start. a constant `mStart` for the current month start. a constant `dStart` for the current day start. Prefer using relative dates.", - "type": ["string", "null"] - }, - "date_to": { - "default": null, - "description": "Right boundary of the date range. Use `null` for the current date. You can not use relative dates here.", - "type": ["string", "null"] - } - }, - "type": "object" - }, "AssistantInsightsQueryBase": { "additionalProperties": false, "properties": { "dateRange": { - "$ref": "#/definitions/AssistantInsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -1092,9 +1060,8 @@ "content": { "type": "string" }, - "done": { - "description": "We only need this \"done\" value to tell when the particular message is finished during its streaming. 
It won't be necessary when we optimize streaming to NOT send the entire message every time a character is added.", - "type": "boolean" + "id": { + "type": "string" }, "type": { "const": "ai", @@ -1365,7 +1332,7 @@ "description": "Compare to date range" }, "dateRange": { - "$ref": "#/definitions/AssistantInsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -1469,8 +1436,25 @@ ], "type": "string" }, + "BaseAssistantMessage": { + "additionalProperties": false, + "properties": { + "id": { + "type": "string" + } + }, + "type": "object" + }, "BaseMathType": { - "enum": ["total", "dau", "weekly_active", "monthly_active", "unique_session", "first_time_for_user"], + "enum": [ + "total", + "dau", + "weekly_active", + "monthly_active", + "unique_session", + "first_time_for_user", + "first_matching_event_for_user" + ], "type": "string" }, "BinCountValue": { @@ -2246,6 +2230,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "timezone": { "type": "string" }, @@ -3792,9 +3779,12 @@ "additionalProperties": false, "properties": { "compare": { + "default": false, + "description": "Whether to compare the current date range to a previous date range.", "type": "boolean" }, "compare_to": { + "description": "The date range to compare to. The value is a relative date. Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-100w` for 100 weeks ago, `-14d` for 14 days ago, `-30h` for 30 hours ago.", "type": "string" } }, @@ -4585,6 +4575,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -6090,6 +6083,9 @@ }, "response": { "$ref": "#/definitions/ExperimentTrendsQueryResponse" + }, + "stats_version": { + "type": "integer" } }, "required": ["count_query", "kind"], @@ -6140,6 +6136,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -6200,16 +6199,15 @@ "content": { "type": "string" }, - "done": { - "const": true, - "type": "boolean" + "id": { + "type": "string" }, "type": { "const": "ai/failure", "type": "string" } }, - "required": ["type", "done"], + "required": ["type"], "type": "object" }, "FeaturePropertyFilter": { @@ -6853,7 +6851,7 @@ "description": "Breakdown of the events and actions" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -7245,6 +7243,12 @@ "query": { "type": "string" }, + "table_names": { + "items": { + "type": "string" + }, + "type": "array" + }, "warnings": { "items": { "$ref": "#/definitions/HogQLNotice" @@ -7533,17 +7537,15 @@ "content": { "type": "string" }, - "done": { - "const": true, - "description": "Human messages are only appended when done.", - "type": "boolean" + "id": { + "type": "string" }, "type": { "const": "human", "type": "string" } }, - "required": ["type", "content", "done"], + "required": ["type", "content"], "type": "object" }, "InsightActorsQuery": { @@ -7766,24 +7768,6 @@ }, "type": "object" }, - "InsightDateRange": { - "additionalProperties": false, - "properties": { - "date_from": { - "default": "-7d", - "type": ["string", "null"] - }, - "date_to": { - "type": ["string", "null"] - }, - "explicitDate": { - "default": false, - "description": "Whether the date_from and 
date_to should be used verbatim. Disables rounding to the start and end of period.", - "type": ["boolean", "null"] - } - }, - "type": "object" - }, "InsightFilter": { "anyOf": [ { @@ -7946,7 +7930,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8003,7 +7987,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8060,7 +8044,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8117,7 +8101,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8174,7 +8158,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8284,7 +8268,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -8693,7 +8677,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -9406,6 +9390,12 @@ "query": { "type": "string" }, + "table_names": { + "items": { + "type": "string" + }, + "type": "array" + }, "warnings": { "items": { "$ref": "#/definitions/HogQLNotice" @@ -9855,6 +9845,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -10483,6 +10476,9 @@ "significant": { "type": "boolean" }, + "stats_version": { + "type": "integer" + }, "variants": { "items": { "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" @@ -11137,9 +11133,8 @@ "content": { "type": "string" }, - "done": { - "const": true, - "type": "boolean" + "id": { + "type": "string" }, "substeps": { "items": { @@ -11152,7 +11147,7 @@ "type": "string" } }, - "required": ["type", "content", "done"], + "required": ["type", "content"], "type": "object" }, "RecordingOrder": { @@ -11455,7 +11450,7 @@ "description": "Groups aggregation" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -11599,17 +11594,15 @@ "content": { "type": "string" }, - "done": { - "const": true, - "description": "Router messages are not streamed, so they can only be done.", - "type": "boolean" + "id": { + "type": "string" }, "type": { "const": "ai/router", "type": "string" } }, - "required": ["type", "content", "done"], + "required": ["type", "content"], "type": "object" }, "SamplingRate": { @@ -12127,7 +12120,7 @@ "description": "Compare to date range" }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -12666,8 +12659,19 @@ "$ref": 
"#/definitions/CompareFilter", "description": "Compare to date range" }, + "conversionGoal": { + "anyOf": [ + { + "$ref": "#/definitions/WebAnalyticsConversionGoal" + }, + { + "type": "null" + } + ], + "description": "Whether we should be comparing against a specific conversion goal" + }, "dateRange": { - "$ref": "#/definitions/InsightDateRange", + "$ref": "#/definitions/DateRange", "description": "Date range for the query" }, "filterTestAccounts": { @@ -12778,8 +12782,11 @@ } ] }, - "done": { - "type": "boolean" + "id": { + "type": "string" + }, + "initiator": { + "type": "string" }, "plan": { "type": "string" @@ -12858,6 +12865,9 @@ "WebExternalClicksTableQuery": { "additionalProperties": false, "properties": { + "compareFilter": { + "$ref": "#/definitions/CompareFilter" + }, "conversionGoal": { "anyOf": [ { @@ -12971,6 +12981,9 @@ "WebGoalsQuery": { "additionalProperties": false, "properties": { + "compareFilter": { + "$ref": "#/definitions/CompareFilter" + }, "conversionGoal": { "anyOf": [ { @@ -13111,14 +13124,7 @@ "additionalProperties": false, "properties": { "compareFilter": { - "anyOf": [ - { - "$ref": "#/definitions/CompareFilter" - }, - { - "type": "null" - } - ] + "$ref": "#/definitions/CompareFilter" }, "conversionGoal": { "anyOf": [ @@ -13250,14 +13256,7 @@ "$ref": "#/definitions/WebStatsBreakdown" }, "compareFilter": { - "anyOf": [ - { - "$ref": "#/definitions/CompareFilter" - }, - { - "type": "null" - } - ] + "$ref": "#/definitions/CompareFilter" }, "conversionGoal": { "anyOf": [ diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index cddaec321af3c..7375910003a3f 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -367,6 +367,7 @@ export interface HogQLMetadataResponse { warnings: HogQLNotice[] notices: HogQLNotice[] query_status?: never + table_names?: string[] } export type AutocompleteCompletionItemKind = @@ -819,7 +820,7 @@ interface InsightVizNodeViewProps { /** Base class for insight query nodes. Should not be used directly. */ export interface InsightsQueryBase> extends Node { /** Date range for the query */ - dateRange?: InsightDateRange + dateRange?: DateRange /** * Exclude internal and test users by applying the respective filters * @@ -910,6 +911,8 @@ export interface TrendsQuery extends InsightsQueryBase { breakdownFilter?: BreakdownFilter /** Compare to date range */ compareFilter?: CompareFilter + /** Whether we should be comparing against a specific conversion goal */ + conversionGoal?: WebAnalyticsConversionGoal | null } export type AssistantArrayPropertyFilterOperator = PropertyOperator.Exact | PropertyOperator.IsNot @@ -1002,31 +1005,11 @@ export type AssistantGroupPropertyFilter = AssistantBasePropertyFilter & { export type AssistantPropertyFilter = AssistantGenericPropertyFilter | AssistantGroupPropertyFilter -export interface AssistantInsightDateRange { - /** - * Start date. The value can be: - * - a relative date. Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-1w` for 1 week ago, `-14d` for 14 days ago, `-30h` for 30 hours ago. - * - an absolute ISO 8601 date string. - * a constant `yStart` for the current year start. - * a constant `mStart` for the current month start. - * a constant `dStart` for the current day start. - * Prefer using relative dates. - * @default -7d - */ - date_from?: string | null - - /** - * Right boundary of the date range. Use `null` for the current date. You can not use relative dates here. 
- * @default null - */ - date_to?: string | null -} - export interface AssistantInsightsQueryBase { /** * Date range for the query */ - dateRange?: AssistantInsightDateRange + dateRange?: DateRange /** * Exclude internal and test users by applying the respective filters @@ -1169,7 +1152,7 @@ export interface AssistantTrendsFilter { yAxisScaleType?: TrendsFilterLegacy['y_axis_scale_type'] } -export interface AssistantCompareFilter { +export interface CompareFilter { /** * Whether to compare the current date range to a previous date range. * @default false @@ -1178,7 +1161,6 @@ export interface AssistantCompareFilter { /** * The date range to compare to. The value is a relative date. Examples of relative dates are: `-1y` for 1 year ago, `-14m` for 14 months ago, `-100w` for 100 weeks ago, `-14d` for 14 days ago, `-30h` for 30 hours ago. - * @default -7d */ compare_to?: string } @@ -1787,6 +1769,7 @@ interface WebAnalyticsQueryBase> extends DataNode< dateRange?: DateRange properties: WebAnalyticsPropertyFilters conversionGoal?: WebAnalyticsConversionGoal | null + compareFilter?: CompareFilter sampling?: { enabled?: boolean forceSamplingRate?: SamplingRate @@ -1798,7 +1781,6 @@ interface WebAnalyticsQueryBase> extends DataNode< export interface WebOverviewQuery extends WebAnalyticsQueryBase { kind: NodeKind.WebOverviewQuery - compareFilter?: CompareFilter | null includeLCPScore?: boolean } @@ -1850,7 +1832,6 @@ export enum WebStatsBreakdown { export interface WebStatsTableQuery extends WebAnalyticsQueryBase { kind: NodeKind.WebStatsTableQuery breakdownBy: WebStatsBreakdown - compareFilter?: CompareFilter | null includeScrollDepth?: boolean // automatically sets includeBounceRate to true includeBounceRate?: boolean doPathCleaning?: boolean @@ -2005,6 +1986,7 @@ export interface ExperimentTrendsQueryResponse { probability: Record significant: boolean significance_code: ExperimentSignificanceCode + stats_version?: integer p_value: number credible_intervals: Record } @@ -2040,6 +2022,7 @@ export interface ExperimentTrendsQuery extends DataNode export interface Breakdown { @@ -2354,11 +2326,6 @@ export interface BreakdownFilter { breakdown_hide_other_aggregation?: boolean | null // hides the "other" field for trends } -export interface CompareFilter { - compare?: boolean - compare_to?: string -} - // TODO: Rename to `DashboardFilters` for consistency with `HogQLFilters` export interface DashboardFilter { date_from?: string | null @@ -2485,48 +2452,41 @@ export enum AssistantMessageType { Router = 'ai/router', } -export interface HumanMessage { +export interface BaseAssistantMessage { + id?: string +} + +export interface HumanMessage extends BaseAssistantMessage { type: AssistantMessageType.Human content: string - /** Human messages are only appended when done. */ - done: true } -export interface AssistantMessage { +export interface AssistantMessage extends BaseAssistantMessage { type: AssistantMessageType.Assistant content: string - /** - * We only need this "done" value to tell when the particular message is finished during its streaming. - * It won't be necessary when we optimize streaming to NOT send the entire message every time a character is added. 
- */ - done?: boolean } -export interface ReasoningMessage { +export interface ReasoningMessage extends BaseAssistantMessage { type: AssistantMessageType.Reasoning content: string substeps?: string[] - done: true } -export interface VisualizationMessage { +export interface VisualizationMessage extends BaseAssistantMessage { type: AssistantMessageType.Visualization plan?: string answer?: AssistantTrendsQuery | AssistantFunnelsQuery - done?: boolean + initiator?: string } -export interface FailureMessage { +export interface FailureMessage extends BaseAssistantMessage { type: AssistantMessageType.Failure content?: string - done: true } -export interface RouterMessage { +export interface RouterMessage extends BaseAssistantMessage { type: AssistantMessageType.Router content: string - /** Router messages are not streamed, so they can only be done. */ - done: true } export type RootAssistantMessage = @@ -2540,6 +2500,7 @@ export type RootAssistantMessage = export enum AssistantEventType { Status = 'status', Message = 'message', + Conversation = 'conversation', } export enum AssistantGenerationStatusType { diff --git a/frontend/src/scenes/authentication/TwoFactorSetupModal.tsx b/frontend/src/scenes/authentication/TwoFactorSetupModal.tsx index 8da04b39ed0bd..ae63d8649e87d 100644 --- a/frontend/src/scenes/authentication/TwoFactorSetupModal.tsx +++ b/frontend/src/scenes/authentication/TwoFactorSetupModal.tsx @@ -1,35 +1,25 @@ import { useActions, useValues } from 'kea' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonModal } from 'lib/lemon-ui/LemonModal' +import { membersLogic } from 'scenes/organization/membersLogic' +import { userLogic } from 'scenes/userLogic' import { twoFactorLogic } from './twoFactorLogic' import { TwoFactorSetup } from './TwoFactorSetup' -interface TwoFactorSetupModalProps { - onSuccess: () => void - closable?: boolean - required?: boolean - forceOpen?: boolean -} - -export function TwoFactorSetupModal({ - onSuccess, - closable = true, - required = false, - forceOpen = false, -}: TwoFactorSetupModalProps): JSX.Element { - const { isTwoFactorSetupModalOpen } = useValues(twoFactorLogic) - const { toggleTwoFactorSetupModal } = useActions(twoFactorLogic) +export function TwoFactorSetupModal(): JSX.Element { + const { isTwoFactorSetupModalOpen, forceOpenTwoFactorSetupModal } = useValues(twoFactorLogic) + const { closeTwoFactorSetupModal } = useActions(twoFactorLogic) return ( toggleTwoFactorSetupModal(false) : undefined} - closable={closable} + isOpen={isTwoFactorSetupModalOpen || forceOpenTwoFactorSetupModal} + onClose={!forceOpenTwoFactorSetupModal ? () => closeTwoFactorSetupModal() : undefined} + closable={!forceOpenTwoFactorSetupModal} >
    - {required && ( + {forceOpenTwoFactorSetupModal && ( Your organization requires you to set up 2FA. @@ -37,10 +27,9 @@ export function TwoFactorSetupModal({

    Use an authenticator app like Google Authenticator or 1Password to scan the QR code below.
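            {/* On success, the handler below now closes the modal and reloads the current user plus the member list, replacing the removed onSuccess prop. */}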

    { - toggleTwoFactorSetupModal(false) - if (onSuccess) { - onSuccess() - } + closeTwoFactorSetupModal() + userLogic.actions.loadUser() + membersLogic.actions.loadAllMembers() }} />
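A note on the open/close API introduced here and in twoFactorLogic below (a sketch, with names taken from the surrounding diff): the old toggleTwoFactorSetupModal(open) action is split into an open/close pair, and the optional forceOpen flag is what makes the modal non-dismissable.

    import { useActions } from 'kea'
    import { twoFactorLogic } from 'scenes/authentication/twoFactorLogic'

    // Inside a component or logic listener:
    const { openTwoFactorSetupModal } = useActions(twoFactorLogic)
    openTwoFactorSetupModal()     // voluntary setup: modal keeps onClose and stays closable
    openTwoFactorSetupModal(true) // enforced setup: forceOpenTwoFactorSetupModal is set, so no onClose and closable={false}

The forced variant is used from twoFactorLogic's afterMount below, when the organization sets enforce_2fa and the current user has no 2FA enabled.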
    diff --git a/frontend/src/scenes/authentication/signup/verify-email/verifyEmailLogic.ts b/frontend/src/scenes/authentication/signup/verify-email/verifyEmailLogic.ts index 55ef8dc85375d..b1b15274f4a44 100644 --- a/frontend/src/scenes/authentication/signup/verify-email/verifyEmailLogic.ts +++ b/frontend/src/scenes/authentication/signup/verify-email/verifyEmailLogic.ts @@ -31,7 +31,7 @@ export const verifyEmailLogic = kea([ { validateEmailToken: async ({ uuid, token }: { uuid: string; token: string }, breakpoint) => { try { - await api.create(`api/users/${uuid}/verify_email/`, { token, uuid }) + await api.create(`api/users/verify_email/`, { token, uuid }) actions.setView('success') await breakpoint(2000) window.location.href = '/' @@ -48,7 +48,7 @@ export const verifyEmailLogic = kea([ { requestVerificationLink: async ({ uuid }: { uuid: string }) => { try { - await api.create(`api/users/${uuid}/request_email_verification/`, { uuid }) + await api.create(`api/users/request_email_verification/`, { uuid }) lemonToast.success( 'A new verification link has been sent to the associated email address. Please check your inbox.' ) diff --git a/frontend/src/scenes/authentication/twoFactorLogic.ts b/frontend/src/scenes/authentication/twoFactorLogic.ts index 9f5c1e5eecb5a..37c331b809868 100644 --- a/frontend/src/scenes/authentication/twoFactorLogic.ts +++ b/frontend/src/scenes/authentication/twoFactorLogic.ts @@ -4,7 +4,9 @@ import { forms } from 'kea-forms' import { loaders } from 'kea-loaders' import api from 'lib/api' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { membersLogic } from 'scenes/organization/membersLogic' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' +import { userLogic } from 'scenes/userLogic' import type { twoFactorLogicType } from './twoFactorLogicType' @@ -26,7 +28,8 @@ export const twoFactorLogic = kea([ path(['scenes', 'authentication', 'loginLogic']), props({} as TwoFactorLogicProps), connect({ - values: [preflightLogic, ['preflight'], featureFlagLogic, ['featureFlags']], + values: [preflightLogic, ['preflight'], featureFlagLogic, ['featureFlags'], userLogic, ['user']], + actions: [userLogic, ['loadUser'], membersLogic, ['loadAllMembers']], }), actions({ setGeneralError: (code: string, detail: string) => ({ code, detail }), @@ -34,7 +37,8 @@ export const twoFactorLogic = kea([ loadStatus: true, generateBackupCodes: true, disable2FA: true, - toggleTwoFactorSetupModal: (open: boolean) => ({ open }), + openTwoFactorSetupModal: (forceOpen?: boolean) => ({ forceOpen }), + closeTwoFactorSetupModal: true, toggleDisable2FAModal: (open: boolean) => ({ open }), toggleBackupCodesModal: (open: boolean) => ({ open }), }), @@ -42,7 +46,15 @@ export const twoFactorLogic = kea([ isTwoFactorSetupModalOpen: [ false, { - toggleTwoFactorSetupModal: (_, { open }) => open, + openTwoFactorSetupModal: () => true, + closeTwoFactorSetupModal: () => false, + }, + ], + forceOpenTwoFactorSetupModal: [ + false, + { + openTwoFactorSetupModal: (_, { forceOpen }) => !!forceOpen, + closeTwoFactorSetupModal: () => false, }, ], isDisable2FAModalOpen: [ @@ -88,11 +100,9 @@ export const twoFactorLogic = kea([ startSetup: [ {}, { - toggleTwoFactorSetupModal: async ({ open }, breakpoint) => { - if (open) { - breakpoint() - await api.get('api/users/@me/two_factor_start_setup/') - } + openTwoFactorSetupModal: async (_, breakpoint) => { + breakpoint() + await api.get('api/users/@me/two_factor_start_setup/') return { status: 'completed' } }, }, @@ -143,6 +153,10 @@ export 
const twoFactorLogic = kea([ await api.create('api/users/@me/two_factor_disable/') lemonToast.success('2FA disabled successfully') actions.loadStatus() + + // Refresh user and members + actions.loadUser() + actions.loadAllMembers() } catch (e) { const { code, detail } = e as Record actions.setGeneralError(code, detail) @@ -152,15 +166,17 @@ export const twoFactorLogic = kea([ generateBackupCodesSuccess: () => { lemonToast.success('Backup codes generated successfully') }, - toggleTwoFactorSetupModal: ({ open }) => { - if (!open) { - // Clear the form when closing the modal - actions.resetToken() - } + closeTwoFactorSetupModal: () => { + // Clear the form when closing the modal + actions.resetToken() }, })), - afterMount(({ actions }) => { + afterMount(({ actions, values }) => { actions.loadStatus() + + if (values.user && values.user.organization?.enforce_2fa && !values.user.is_2fa_enabled) { + actions.openTwoFactorSetupModal(true) + } }), ]) diff --git a/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx b/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx index 03757f853741f..e694c94ea1ad0 100644 --- a/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx +++ b/frontend/src/scenes/dashboard/newDashboardLogic.test.tsx @@ -1,36 +1,46 @@ +import { NodeKind } from '~/queries/schema' + import { applyTemplate } from './newDashboardLogic' describe('template function in newDashboardLogic', () => { it('ignores unused variables', () => { expect( - applyTemplate({ a: 'hello', b: 'hi' }, [ - { - id: 'VARIABLE_1', - name: 'a', - default: { - event: '$pageview', + applyTemplate( + { a: 'hello', b: 'hi' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + event: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', }, - description: 'The description of the variable', - required: true, - type: 'event', - }, - ]) + ], + null + ) ).toEqual({ a: 'hello', b: 'hi' }) }) it('uses identified variables', () => { expect( - applyTemplate({ a: '{VARIABLE_1}', b: 'hi' }, [ - { - id: 'VARIABLE_1', - name: 'a', - default: { - event: '$pageview', + applyTemplate( + { a: '{VARIABLE_1}', b: 'hi' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + event: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', }, - description: 'The description of the variable', - required: true, - type: 'event', - }, - ]) + ], + null + ) ).toEqual({ a: { event: '$pageview', @@ -38,4 +48,85 @@ describe('template function in newDashboardLogic', () => { b: 'hi', }) }) + + it('replaces variables in query based tiles', () => { + expect( + applyTemplate( + { a: '{VARIABLE_1}' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + id: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', + }, + ], + NodeKind.TrendsQuery + ) + ).toEqual({ + a: { + event: '$pageview', + kind: 'EventsNode', + math: 'total', + }, + }) + }) + + it("removes the math property from query based tiles that don't support it", () => { + expect( + applyTemplate( + { a: '{VARIABLE_1}' }, + [ + { + id: 'VARIABLE_1', + name: 'a', + default: { + id: '$pageview', + }, + description: 'The description of the variable', + required: true, + type: 'event', + }, + ], + NodeKind.LifecycleQuery + ) + ).toEqual({ + a: { + event: '$pageview', + kind: 'EventsNode', + }, + }) + }) + + it('removes the math property from retention insight tiles', () => { + expect( + applyTemplate( + { a: '{VARIABLE_1}' }, + [ + { + id: 
'VARIABLE_1',
+                    name: 'a',
+                    default: {
+                        id: '$pageview',
+                        math: 'dau' as any,
+                        type: 'events' as any,
+                    },
+                    description: 'The description of the variable',
+                    required: true,
+                    type: 'event',
+                },
+            ],
+            NodeKind.RetentionQuery
+        )
+    ).toEqual({
+        a: {
+            id: '$pageview',
+            type: 'events',
+        },
+    })
+})
 })
diff --git a/frontend/src/scenes/dashboard/newDashboardLogic.ts b/frontend/src/scenes/dashboard/newDashboardLogic.ts
index 6749067872258..564a24f736c1f 100644
--- a/frontend/src/scenes/dashboard/newDashboardLogic.ts
+++ b/frontend/src/scenes/dashboard/newDashboardLogic.ts
@@ -5,11 +5,15 @@ import api from 'lib/api'
 import { DashboardRestrictionLevel } from 'lib/constants'
 import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast'
 import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
+import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow'
 import { teamLogic } from 'scenes/teamLogic'
 import { urls } from 'scenes/urls'

 import { dashboardsModel } from '~/models/dashboardsModel'
+import { legacyEntityToNode, sanitizeRetentionEntity } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode'
 import { getQueryBasedDashboard } from '~/queries/nodes/InsightViz/utils'
+import { NodeKind } from '~/queries/schema'
+import { isInsightVizNode } from '~/queries/utils'
 import { DashboardTemplateType, DashboardTemplateVariableType, DashboardTile, DashboardType, JsonType } from '~/types'

 import type { newDashboardLogicType } from './newDashboardLogicType'
@@ -35,24 +39,47 @@ export interface NewDashboardLogicProps {
 }

 // Currently this is a very generic recursive function in case we want to add template variables to aspects beyond events
-export function applyTemplate(obj: DashboardTile | JsonType, variables: DashboardTemplateVariableType[]): JsonType {
+export function applyTemplate(
+    obj: DashboardTile | JsonType,
+    variables: DashboardTemplateVariableType[],
+    queryKind: NodeKind | null
+): JsonType {
     if (typeof obj === 'string') {
         if (obj.startsWith('{') && obj.endsWith('}')) {
             const variableId = obj.substring(1, obj.length - 1)
             const variable = variables.find((variable) => variable.id === variableId)
             if (variable && variable.default) {
+                // added for future compatibility - at the moment we only have event variables
+                const isEventVariable = variable.type === 'event'
+
+                if (queryKind && isEventVariable) {
+                    let mathAvailability = MathAvailability.None
+                    if (queryKind === NodeKind.TrendsQuery) {
+                        mathAvailability = MathAvailability.All
+                    } else if (queryKind === NodeKind.StickinessQuery) {
+                        mathAvailability = MathAvailability.ActorsOnly
+                    } else if (queryKind === NodeKind.FunnelsQuery) {
+                        mathAvailability = MathAvailability.FunnelsOnly
+                    }
+                    return (
+                        queryKind === NodeKind.RetentionQuery
+                            ? 
sanitizeRetentionEntity(variable.default as any) + : legacyEntityToNode(variable.default as any, true, mathAvailability) + ) as JsonType + } + return variable.default as JsonType } return obj } } if (Array.isArray(obj)) { - return obj.map((item) => applyTemplate(item, variables)) + return obj.map((item) => applyTemplate(item, variables, queryKind)) } if (typeof obj === 'object' && obj !== null) { const newObject: JsonType = {} for (const [key, value] of Object.entries(obj)) { - newObject[key] = applyTemplate(value, variables) + newObject[key] = applyTemplate(value, variables, queryKind) } return newObject } @@ -60,7 +87,15 @@ export function applyTemplate(obj: DashboardTile | JsonType, variables: Dashboar } function makeTilesUsingVariables(tiles: DashboardTile[], variables: DashboardTemplateVariableType[]): JsonType[] { - return tiles.map((tile: DashboardTile) => applyTemplate(tile, variables)) + return tiles.map((tile: DashboardTile) => { + const isQueryBased = 'query' in tile && tile.query != null + const queryKind: NodeKind | null = isQueryBased + ? isInsightVizNode(tile.query as any) + ? (tile.query as any)?.source.kind + : (tile.query as any)?.kind + : null + return applyTemplate(tile, variables, queryKind) + }) } export const newDashboardLogic = kea([ diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx b/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx index 28974600dfe38..2fd8adf883adb 100644 --- a/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx +++ b/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx @@ -1,7 +1,7 @@ import 'react-data-grid/lib/styles.css' import { IconGear } from '@posthog/icons' -import { LemonButton, LemonTabs, Spinner } from '@posthog/lemon-ui' +import { LemonButton, LemonTabs } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' import { AnimationType } from 'lib/animations/animations' @@ -9,12 +9,13 @@ import { Animation } from 'lib/components/Animation/Animation' import { ExportButton } from 'lib/components/ExportButton/ExportButton' import { useMemo } from 'react' import DataGrid from 'react-data-grid' -import { InsightErrorState } from 'scenes/insights/EmptyStates' +import { InsightErrorState, StatelessInsightLoadingState } from 'scenes/insights/EmptyStates' import { HogQLBoldNumber } from 'scenes/insights/views/BoldNumber/BoldNumber' import { KeyboardShortcut } from '~/layout/navigation-3000/components/KeyboardShortcut' import { themeLogic } from '~/layout/navigation-3000/themeLogic' import { dataNodeLogic } from '~/queries/nodes/DataNode/dataNodeLogic' +import { ElapsedTime } from '~/queries/nodes/DataNode/ElapsedTime' import { LineGraph } from '~/queries/nodes/DataVisualization/Components/Charts/LineGraph' import { SideBar } from '~/queries/nodes/DataVisualization/Components/SideBar' import { Table } from '~/queries/nodes/DataVisualization/Components/Table' @@ -30,22 +31,22 @@ import { ChartDisplayType, ExporterFormat } from '~/types' import { dataWarehouseViewsLogic } from '../saved_queries/dataWarehouseViewsLogic' import { multitabEditorLogic } from './multitabEditorLogic' import { outputPaneLogic, OutputTab } from './outputPaneLogic' +import { InfoTab } from './OutputPaneTabs/InfoTab' export function OutputPane(): JSX.Element { const { activeTab } = useValues(outputPaneLogic) const { setActiveTab } = useActions(outputPaneLogic) const { variablesForInsight } = useValues(variablesLogic) - const { editingView, sourceQuery, exportContext, isValidView, error } = 
useValues(multitabEditorLogic) - const { saveAsInsight, saveAsView, setSourceQuery } = useActions(multitabEditorLogic) + const { editingView, sourceQuery, exportContext, isValidView, error, editorKey } = useValues(multitabEditorLogic) + const { saveAsInsight, saveAsView, setSourceQuery, runQuery } = useActions(multitabEditorLogic) const { isDarkModeOn } = useValues(themeLogic) - const { response, responseLoading } = useValues(dataNodeLogic) - const { loadData } = useActions(dataNodeLogic) + const { response, responseLoading, responseError, queryId, pollResponse } = useValues(dataNodeLogic) const { dataWarehouseSavedQueriesLoading } = useValues(dataWarehouseViewsLogic) const { updateDataWarehouseSavedQuery } = useActions(dataWarehouseViewsLogic) - const { visualizationType } = useValues(dataVisualizationLogic) + const { visualizationType, queryCancelled } = useValues(dataVisualizationLogic) - const vizKey = `SQLEditorScene` + const vizKey = useMemo(() => `SQLEditorScene`, []) const columns = useMemo(() => { return ( @@ -70,44 +71,6 @@ export function OutputPane(): JSX.Element { }) }, [response]) - const Content = (): JSX.Element | null => { - if (activeTab === OutputTab.Results) { - return responseLoading ? ( - - ) : !response ? ( - Query results will appear here - ) : ( -
    - -
    - ) - } - - if (activeTab === OutputTab.Visualization) { - return !response ? ( - Query be results will be visualized here - ) : ( -
    - -
    - ) - } - - return null - } - return (
    {variablesForInsight.length > 0 && ( @@ -128,6 +91,10 @@ export function OutputPane(): JSX.Element { key: OutputTab.Visualization, label: 'Visualization', }, + { + key: OutputTab.Info, + label: 'Info', + }, ]} />
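A note on the refactor in this file: the inline `Content` function deleted above reappears near the end of the file as a module-level component fed entirely through props. Components declared inside a render body get a fresh identity on every render, so React unmounts and remounts their subtree each time, dropping state such as grid scroll position. A minimal sketch of the difference, with hypothetical names:

// Anti-pattern: `Inner` is re-created on each render of `ParentBad`, so React
// treats it as a brand-new component type and remounts the whole subtree,
// discarding its local state, focus, and scroll position.
function ParentBad({ rows }: { rows: string[] }): JSX.Element {
    const Inner = (): JSX.Element => (
        <ul>
            {rows.map((row) => (
                <li key={row}>{row}</li>
            ))}
        </ul>
    )
    return <Inner />
}

// Hoisted: `InnerList` keeps a stable identity across renders of `ParentGood`,
// so React reconciles it in place instead of remounting it.
const InnerList = ({ rows }: { rows: string[] }): JSX.Element => (
    <ul>
        {rows.map((row) => (
            <li key={row}>{row}</li>
        ))}
    </ul>
)

function ParentGood({ rows }: { rows: string[] }): JSX.Element {
    return <InnerList rows={rows} />
}

Passing a loosely typed prop bag, as the new `Content` does, keeps the hoisting mechanical; tightening the prop types would be a natural follow-up.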
    @@ -182,15 +149,35 @@ export function OutputPane(): JSX.Element { disabledReason={error ? error : ''} loading={responseLoading} type="primary" - onClick={() => loadData(true)} + onClick={() => runQuery()} > Run
    -
    - +
    + +
    +
    +
    ) @@ -206,8 +193,6 @@ function InternalDataTableVisualization( showResultControls, response, responseLoading, - responseError, - queryCancelled, isChartSettingsPanelOpen, } = useValues(dataVisualizationLogic) @@ -251,29 +236,7 @@ function InternalDataTableVisualization( )} -
    - {visualizationType !== ChartDisplayType.ActionsTable && responseError ? ( -
    - -
    - ) : ( - component - )} -
    +
    {component}
    {showResultControls && ( <> @@ -302,3 +265,110 @@ function InternalDataTableVisualization( ) } + +const ErrorState = ({ responseError, sourceQuery, queryCancelled, response }: any): JSX.Element | null => { + return ( +
    + +
    + ) +} + +const Content = ({ + activeTab, + responseError, + responseLoading, + response, + sourceQuery, + queryCancelled, + columns, + rows, + isDarkModeOn, + vizKey, + setSourceQuery, + exportContext, + saveAsInsight, + queryId, + pollResponse, + editorKey, +}: any): JSX.Element | null => { + if (activeTab === OutputTab.Results) { + if (responseError) { + return ( + + ) + } + + return responseLoading ? ( + + ) : !response ? ( +
    + Query results will appear here +
    + ) : ( +
    + +
    + ) + } + + if (activeTab === OutputTab.Visualization) { + if (responseError) { + return ( + + ) + } + + return !response ? ( +
    + Query results will be visualized here +
    + ) : ( +
    + +
    + ) + } + + if (activeTab === OutputTab.Info) { + return ( +
    + +
    + ) + } + + return null +} diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/InfoTab.tsx b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/InfoTab.tsx new file mode 100644 index 0000000000000..1c3bbe26558cc --- /dev/null +++ b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/InfoTab.tsx @@ -0,0 +1,111 @@ +import { LemonButton, LemonTag, Tooltip } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { LemonTable } from 'lib/lemon-ui/LemonTable' +import { humanFriendlyDetailedTime } from 'lib/utils' + +import { multitabEditorLogic } from '../multitabEditorLogic' +import { infoTabLogic } from './infoTabLogic' + +interface InfoTabProps { + codeEditorKey: string +} + +export function InfoTab({ codeEditorKey }: InfoTabProps): JSX.Element { + const { sourceTableItems } = useValues(infoTabLogic({ codeEditorKey: codeEditorKey })) + const { editingView, isEditingMaterializedView } = useValues(multitabEditorLogic) + const { runDataWarehouseSavedQuery } = useActions(multitabEditorLogic) + + return ( +
    +
    +
    +

    Materialization

    + BETA +
    +
    + {isEditingMaterializedView ? ( +
    + {editingView?.last_run_at ? ( + `Last run at ${humanFriendlyDetailedTime(editingView.last_run_at)}` + ) : ( +
    + Materialization scheduled +
    + )} + editingView && runDataWarehouseSavedQuery(editingView.id)} + className="mt-2" + type="secondary" + > + Run now + +
    + ) : ( +
    +

    + Materialized views are a way to pre-compute data in your data warehouse. This allows you + to run queries faster and more efficiently. +

    + editingView && runDataWarehouseSavedQuery(editingView.id)} + type="primary" + disabledReason={editingView ? undefined : 'You must save the view first'} + > + Materialize + +
    + )} +
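For orientation, both the "Run now" and "Materialize" buttons above dispatch `runDataWarehouseSavedQuery`, whose listener is defined in the `dataWarehouseViewsLogic` change further down in this diff. Condensed into a standalone sketch; the wrapper function and `onStarted` callback are illustrative, not part of the diff:

import api from 'lib/api'
import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast'

// Start a materialization run for a saved query, report the outcome via
// toasts, and let the caller refresh the saved-query list so that `status`
// and `last_run_at` shown in this tab stay up to date.
export async function startMaterialization(viewId: string, onStarted: () => void): Promise<void> {
    try {
        await api.dataWarehouseSavedQueries.run(viewId)
        lemonToast.success('Materialization started')
        onStarted() // e.g. actions.loadDataWarehouseSavedQueries()
    } catch {
        lemonToast.error('Failed to run materialization')
    }
}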
    +
    +
    +

    Dependencies

    +

    + Dependencies are tables that this query uses. See when a source or materialized table was last run. +

    +
    + name, + }, + { + key: 'Type', + title: 'Type', + render: (_, { type }) => type, + }, + { + key: 'Status', + title: 'Status', + render: (_, { type, status }) => { + if (type === 'source') { + return ( + + N/A + + ) + } + return status + }, + }, + { + key: 'Last run at', + title: 'Last run at', + render: (_, { type, last_run_at }) => { + if (type === 'source') { + return ( + + N/A + + ) + } + return humanFriendlyDetailedTime(last_run_at) + }, + }, + ]} + dataSource={sourceTableItems} + /> +
    + ) +} diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/infoTabLogic.ts b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/infoTabLogic.ts new file mode 100644 index 0000000000000..4510e80db6693 --- /dev/null +++ b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/infoTabLogic.ts @@ -0,0 +1,63 @@ +import { connect, kea, key, path, props, selectors } from 'kea' +import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' +import { dataWarehouseViewsLogic } from 'scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic' + +import { multitabEditorLogic } from '../multitabEditorLogic' +import type { infoTabLogicType } from './infoTabLogicType' + +export interface InfoTableRow { + name: string + type: 'source' | 'table' + status?: string + last_run_at?: string +} + +export interface InfoTabLogicProps { + codeEditorKey: string +} + +export const infoTabLogic = kea([ + path(['data-warehouse', 'editor', 'outputPaneTabs', 'infoTabLogic']), + props({} as InfoTabLogicProps), + key((props) => props.codeEditorKey), + connect((props: InfoTabLogicProps) => ({ + values: [ + multitabEditorLogic({ key: props.codeEditorKey }), + ['metadata'], + databaseTableListLogic, + ['posthogTablesMap', 'dataWarehouseTablesMap'], + dataWarehouseViewsLogic, + ['dataWarehouseSavedQueryMap'], + ], + })), + selectors({ + sourceTableItems: [ + (s) => [s.metadata, s.dataWarehouseSavedQueryMap], + (metadata, dataWarehouseSavedQueryMap) => { + if (!metadata) { + return [] + } + return ( + metadata.table_names?.map((table_name) => { + const view = dataWarehouseSavedQueryMap[table_name] + if (view) { + return { + name: table_name, + type: 'table', + status: view.status, + last_run_at: view.last_run_at || 'never', + } + } + + return { + name: table_name, + type: 'source', + status: undefined, + last_run_at: undefined, + } + }) || [] + ) + }, + ], + }), +]) diff --git a/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx b/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx index 35a41c0f402b7..85c9d80ef6270 100644 --- a/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx +++ b/frontend/src/scenes/data-warehouse/editor/QueryTabs.tsx @@ -14,7 +14,7 @@ interface QueryTabsProps { export function QueryTabs({ models, onClear, onClick, onAdd, activeModelUri }: QueryTabsProps): JSX.Element { return ( -
    +
    {models.map((model: QueryTab) => ( - +
    +
    + +
    {editingView && (
    - Editing view "{editingView.name}" + + Editing {editingView.status ? 'materialized view' : 'view'} "{editingView.name}" +
    )} { + setMetadata(metadata) + }, }} /> @@ -85,16 +92,13 @@ export function QueryWindow(): JSX.Element { ) } -function InternalQueryWindow(): JSX.Element { +function InternalQueryWindow(): JSX.Element | null { const { cacheLoading, sourceQuery, queryInput } = useValues(multitabEditorLogic) const { setSourceQuery } = useActions(multitabEditorLogic) + // NOTE: hacky way to avoid flicker loading if (cacheLoading) { - return ( -
    - -
    - ) + return null } const dataVisualizationLogicProps: DataVisualizationLogicProps = { diff --git a/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.tsx similarity index 63% rename from frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts rename to frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.tsx index cfd559e59506a..c45ea5559fb5a 100644 --- a/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts +++ b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.tsx @@ -1,9 +1,9 @@ +import { Tooltip } from '@posthog/lemon-ui' import Fuse from 'fuse.js' import { connect, kea, path, selectors } from 'kea' import { router } from 'kea-router' import { subscriptions } from 'kea-subscriptions' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { IconCalculate, IconClipboardEdit } from 'lib/lemon-ui/icons' import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' import { sceneLogic } from 'scenes/sceneLogic' import { Scene } from 'scenes/sceneTypes' @@ -42,20 +42,6 @@ const savedQueriesfuse = new Fuse([], { includeMatches: true, }) -const nonMaterializedViewsfuse = new Fuse([], { - keys: [{ name: 'name', weight: 2 }], - threshold: 0.3, - ignoreLocation: true, - includeMatches: true, -}) - -const materializedViewsfuse = new Fuse([], { - keys: [{ name: 'name', weight: 2 }], - threshold: 0.3, - ignoreLocation: true, - includeMatches: true, -}) - export const editorSidebarLogic = kea([ path(['data-warehouse', 'editor', 'editorSidebarLogic']), connect({ @@ -66,8 +52,6 @@ export const editorSidebarLogic = kea([ ['dataWarehouseSavedQueries', 'dataWarehouseSavedQueryMapById', 'dataWarehouseSavedQueriesLoading'], databaseTableListLogic, ['posthogTables', 'dataWarehouseTables', 'databaseLoading', 'views', 'viewsMapById'], - featureFlagLogic, - ['featureFlags'], ], actions: [ editorSceneLogic, @@ -86,19 +70,13 @@ export const editorSidebarLogic = kea([ s.relevantPosthogTables, s.relevantDataWarehouseTables, s.databaseLoading, - s.relevantNonMaterializedViews, - s.relevantMaterializedViews, - s.featureFlags, ], ( relevantSavedQueries, dataWarehouseSavedQueriesLoading, relevantPosthogTables, relevantDataWarehouseTables, - databaseLoading, - relevantNonMaterializedViews, - relevantMaterializedViews, - featureFlags + databaseLoading ) => [ { key: 'data-warehouse-sources', @@ -163,13 +141,19 @@ export const editorSidebarLogic = kea([ key: 'data-warehouse-views', noun: ['view', 'views'], loading: dataWarehouseSavedQueriesLoading, - items: (featureFlags[FEATURE_FLAGS.DATA_MODELING] - ? relevantNonMaterializedViews - : relevantSavedQueries - ).map(([savedQuery, matches]) => ({ + items: relevantSavedQueries.map(([savedQuery, matches]) => ({ key: savedQuery.id, name: savedQuery.name, url: '', + icon: savedQuery.status ? ( + + + + ) : ( + + + + ), searchMatch: matches ? { matchingFields: matches.map((match) => match.key), @@ -195,16 +179,6 @@ export const editorSidebarLogic = kea([ actions.toggleJoinTableModal() }, }, - ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] && !savedQuery.status - ? [ - { - label: 'Materialize', - onClick: () => { - actions.runDataWarehouseSavedQuery(savedQuery.id) - }, - }, - ] - : []), { label: 'Delete', status: 'danger', @@ -215,63 +189,6 @@ export const editorSidebarLogic = kea([ ], })), } as SidebarCategory, - ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] - ? 
[ - { - key: 'data-warehouse-materialized-views', - noun: ['materialized view', 'materialized views'], - loading: dataWarehouseSavedQueriesLoading, - items: relevantMaterializedViews.map(([materializedView, matches]) => ({ - key: materializedView.id, - name: materializedView.name, - url: '', - searchMatch: matches - ? { - matchingFields: matches.map((match) => match.key), - nameHighlightRanges: matches.find((match) => match.key === 'name')?.indices, - } - : null, - onClick: () => { - actions.selectSchema(materializedView) - }, - menuItems: [ - { - label: 'Edit view definition', - onClick: () => { - multitabEditorLogic({ - key: `hogQLQueryEditor/${router.values.location.pathname}`, - }).actions.createTab(materializedView.query.query, materializedView) - }, - }, - { - label: 'Add join', - onClick: () => { - actions.selectSourceTable(materializedView.name) - actions.toggleJoinTableModal() - }, - }, - ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] && materializedView.status - ? [ - { - label: 'Run', - onClick: () => { - actions.runDataWarehouseSavedQuery(materializedView.id) - }, - }, - ] - : []), - { - label: 'Delete', - status: 'danger', - onClick: () => { - actions.deleteDataWarehouseSavedQuery(materializedView.id) - }, - }, - ], - })), - }, - ] - : []), ], ], nonMaterializedViews: [ @@ -327,28 +244,6 @@ export const editorSidebarLogic = kea([ return dataWarehouseSavedQueries.map((savedQuery) => [savedQuery, null]) }, ], - relevantNonMaterializedViews: [ - (s) => [s.nonMaterializedViews, navigation3000Logic.selectors.searchTerm], - (nonMaterializedViews, searchTerm): [DataWarehouseSavedQuery, FuseSearchMatch[] | null][] => { - if (searchTerm) { - return nonMaterializedViewsfuse - .search(searchTerm) - .map((result) => [result.item, result.matches as FuseSearchMatch[]]) - } - return nonMaterializedViews.map((view) => [view, null]) - }, - ], - relevantMaterializedViews: [ - (s) => [s.materializedViews, navigation3000Logic.selectors.searchTerm], - (materializedViews, searchTerm): [DataWarehouseSavedQuery, FuseSearchMatch[] | null][] => { - if (searchTerm) { - return materializedViewsfuse - .search(searchTerm) - .map((result) => [result.item, result.matches as FuseSearchMatch[]]) - } - return materializedViews.map((view) => [view, null]) - }, - ], })), subscriptions({ dataWarehouseTables: (dataWarehouseTables) => { diff --git a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx index e8960dfa8d81f..94995a446ae2d 100644 --- a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx +++ b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx @@ -48,13 +48,18 @@ export const multitabEditorLogic = kea([ connect({ actions: [ dataWarehouseViewsLogic, - ['deleteDataWarehouseSavedQuerySuccess', 'createDataWarehouseSavedQuerySuccess'], + [ + 'loadDataWarehouseSavedQueriesSuccess', + 'deleteDataWarehouseSavedQuerySuccess', + 'createDataWarehouseSavedQuerySuccess', + 'runDataWarehouseSavedQuery', + ], ], }), actions({ setQueryInput: (queryInput: string) => ({ queryInput }), updateState: true, - runQuery: (queryOverride?: string) => ({ queryOverride }), + runQuery: (queryOverride?: string, switchTab?: boolean) => ({ queryOverride, switchTab }), setActiveQuery: (query: string) => ({ query }), setTabs: (tabs: QueryTab[]) => ({ tabs }), addTab: (tab: QueryTab) => ({ tab }), @@ -66,13 +71,13 @@ export const multitabEditorLogic = kea([ initialize: true, saveAsView: true, saveAsViewSubmit: (name: string) => ({ 
name }), - setMetadata: (query: string, metadata: HogQLMetadataResponse) => ({ query, metadata }), saveAsInsight: true, saveAsInsightSubmit: (name: string) => ({ name }), setCacheLoading: (loading: boolean) => ({ loading }), setError: (error: string | null) => ({ error }), setIsValidView: (isValidView: boolean) => ({ isValidView }), setSourceQuery: (sourceQuery: DataVisualizationNode) => ({ sourceQuery }), + setMetadata: (metadata: HogQLMetadataResponse) => ({ metadata }), editView: (query: string, view: DataWarehouseSavedQuery) => ({ query, view }), }), propsChanged(({ actions, props }, oldProps) => { @@ -80,7 +85,7 @@ export const multitabEditorLogic = kea([ actions.initialize() } }), - reducers({ + reducers(({ props }) => ({ cacheLoading: [ true, { @@ -149,7 +154,14 @@ export const multitabEditorLogic = kea([ setIsValidView: (_, { isValidView }) => isValidView, }, ], - }), + metadata: [ + null as HogQLMetadataResponse | null, + { + setMetadata: (_, { metadata }) => metadata, + }, + ], + editorKey: [props.key], + })), listeners(({ values, props, actions, asyncActions }) => ({ editView: ({ query, view }) => { const maybeExistingTab = values.allTabs.find((tab) => tab.view?.id === view.id) @@ -311,7 +323,7 @@ export const multitabEditorLogic = kea([ }) localStorage.setItem(editorModelsStateKey(props.key), JSON.stringify(queries)) }, - runQuery: ({ queryOverride }) => { + runQuery: ({ queryOverride, switchTab }) => { const query = queryOverride || values.queryInput actions.setSourceQuery({ @@ -328,7 +340,7 @@ export const multitabEditorLogic = kea([ query, }, autoLoad: false, - }).actions.loadData(true) + }).actions.loadData(!switchTab) }, saveAsView: async () => { LemonDialog.openForm({ @@ -388,6 +400,15 @@ export const multitabEditorLogic = kea([ router.actions.push(urls.insightView(insight.short_id)) }, + loadDataWarehouseSavedQueriesSuccess: ({ dataWarehouseSavedQueries }) => { + // keep tab views up to date + const newTabs = values.allTabs.map((tab) => ({ + ...tab, + view: dataWarehouseSavedQueries.find((v) => v.id === tab.view?.id), + })) + actions.setTabs(newTabs) + actions.updateState() + }, deleteDataWarehouseSavedQuerySuccess: ({ payload: viewId }) => { const tabToRemove = values.allTabs.find((tab) => tab.view?.id === viewId) if (tabToRemove) { @@ -412,15 +433,20 @@ export const multitabEditorLogic = kea([ lemonToast.success('View updated') }, })), - subscriptions(({ props, actions }) => ({ + subscriptions(({ props, actions, values }) => ({ activeModelUri: (activeModelUri) => { if (props.monaco) { const _model = props.monaco.editor.getModel(activeModelUri.uri) const val = _model?.getValue() actions.setQueryInput(val ?? 
'') - actions.runQuery() + actions.runQuery(undefined, true) } }, + allTabs: () => { + // keep selected tab up to date + const activeTab = values.allTabs.find((tab) => tab.uri.path === values.activeModelUri?.uri.path) + activeTab && actions.selectTab(activeTab) + }, })), selectors({ exportContext: [ @@ -435,5 +461,11 @@ export const multitabEditorLogic = kea([ } as ExportContext }, ], + isEditingMaterializedView: [ + (s) => [s.editingView], + (editingView) => { + return !!editingView?.status + }, + ], }), ]) diff --git a/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts b/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts index 659c79b440635..4e06f611dc49d 100644 --- a/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts +++ b/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts @@ -5,6 +5,7 @@ import type { outputPaneLogicType } from './outputPaneLogicType' export enum OutputTab { Results = 'results', Visualization = 'visualization', + Info = 'info', } export const outputPaneLogic = kea([ diff --git a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx index d66a0285526ba..ae61570189150 100644 --- a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx +++ b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx @@ -70,8 +70,13 @@ export const dataWarehouseViewsLogic = kea([ actions.loadDatabase() }, runDataWarehouseSavedQuery: async ({ viewId }) => { - await api.dataWarehouseSavedQueries.run(viewId) - actions.loadDataWarehouseSavedQueries() + try { + await api.dataWarehouseSavedQueries.run(viewId) + lemonToast.success('Materialization started') + actions.loadDataWarehouseSavedQueries() + } catch (error) { + lemonToast.error(`Failed to run materialization`) + } }, })), selectors({ @@ -92,6 +97,17 @@ export const dataWarehouseViewsLogic = kea([ ) }, ], + dataWarehouseSavedQueryMap: [ + (s) => [s.dataWarehouseSavedQueries], + (dataWarehouseSavedQueries) => { + return ( + dataWarehouseSavedQueries?.reduce((acc, cur) => { + acc[cur.name] = cur + return acc + }, {} as Record) ?? {} + ) + }, + ], }), events(({ actions, cache }) => ({ afterMount: () => { diff --git a/frontend/src/scenes/error-tracking/AlphaAccessScenePrompt.tsx b/frontend/src/scenes/error-tracking/AlphaAccessScenePrompt.tsx new file mode 100644 index 0000000000000..3f9c7b2a50168 --- /dev/null +++ b/frontend/src/scenes/error-tracking/AlphaAccessScenePrompt.tsx @@ -0,0 +1,40 @@ +import { LemonButton } from '@posthog/lemon-ui' +import { useActions } from 'kea' +import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' +import { supportLogic } from 'lib/components/Support/supportLogic' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' + +import { ProductKey } from '~/types' + +export const AlphaAccessScenePrompt = ({ children }: { children: React.ReactElement }): JSX.Element => { + const hasErrorTracking = useFeatureFlag('ERROR_TRACKING') + const { openSupportForm } = useActions(supportLogic) + + return hasErrorTracking ? 
( + children + ) : ( + + openSupportForm({ + target_area: 'error_tracking', + isEmailFormOpen: true, + message: 'Hi\n\nI would like to request access to the error tracking product', + severity_level: 'low', + }) + } + > + Request access + + } + productKey={ProductKey.ERROR_TRACKING} + /> + ) +} diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx index d381d62640a0f..33c8f2af6779e 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx @@ -8,6 +8,7 @@ import { humanFriendlyDetailedTime } from 'lib/utils' import { useEffect, useState } from 'react' import { SceneExport } from 'scenes/sceneTypes' +import { AlphaAccessScenePrompt } from './AlphaAccessScenePrompt' import { errorTrackingSymbolSetLogic } from './errorTrackingSymbolSetLogic' import { SymbolSetUploadModal } from './SymbolSetUploadModal' @@ -25,19 +26,24 @@ export function ErrorTrackingConfigurationScene(): JSX.Element { }, [loadSymbolSets]) return ( -
    -

    Symbol sets

    -

    - Source maps are required to demangle any minified code in your exception stack traces. PostHog - automatically retrieves source maps where possible. Cases where it was not possible are listed below. - Source maps can be uploaded retroactively but changes will only apply to all future exceptions ingested. -

    - {missingSymbolSets.length > 0 && ( - - )} - {validSymbolSets.length > 0 && } - -
    + +
    +

    Symbol sets

    +

    + Source maps are required to demangle any minified code in your exception stack traces. PostHog + automatically retrieves source maps where possible. Cases where it was not possible are listed + below. Source maps can be uploaded retroactively, but changes will only apply to exceptions + ingested from then on.

    + {missingSymbolSets.length > 0 && ( + + )} + {(validSymbolSets.length > 0 || missingSymbolSets.length === 0) && ( + + )} + +
    +
    ) } diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx index 7965e2563ae22..33549cf177fa2 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingIssueScene.tsx @@ -8,6 +8,7 @@ import { SceneExport } from 'scenes/sceneTypes' import { ErrorTrackingIssue } from '~/queries/schema' +import { AlphaAccessScenePrompt } from './AlphaAccessScenePrompt' import { AssigneeSelect } from './AssigneeSelect' import ErrorTrackingFilters from './ErrorTrackingFilters' import { errorTrackingIssueSceneLogic } from './errorTrackingIssueSceneLogic' @@ -40,47 +41,52 @@ export function ErrorTrackingIssueScene(): JSX.Element { }, []) return ( - <> - - updateIssue({ assignee })} - type="secondary" - showName - /> -
    - updateIssue({ status: 'archived' })}> - Archive - - updateIssue({ status: 'resolved' })}> - Resolve - + + <> + + updateIssue({ assignee })} + type="secondary" + showName + /> +
    + updateIssue({ status: 'archived' })} + > + Archive + + updateIssue({ status: 'resolved' })}> + Resolve + +
    -
    + ) : ( + updateIssue({ status: 'active' })} + tooltip="Mark as active" + > + {STATUS_LABEL[issue.status]} + + ) ) : ( - updateIssue({ status: 'active' })} - tooltip="Mark as active" - > - {STATUS_LABEL[issue.status]} - + false ) - ) : ( - false - ) - } - /> - - - - - - + } + /> + + + + + + + ) } diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx index 754cb398ec136..d2fb4ffc8ebb7 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx @@ -16,6 +16,7 @@ import { ErrorTrackingIssue } from '~/queries/schema' import { QueryContext, QueryContextColumnComponent, QueryContextColumnTitleComponent } from '~/queries/types' import { InsightLogicProps } from '~/types' +import { AlphaAccessScenePrompt } from './AlphaAccessScenePrompt' import { AssigneeSelect } from './AssigneeSelect' import { errorTrackingDataNodeLogic } from './errorTrackingDataNodeLogic' import ErrorTrackingFilters from './ErrorTrackingFilters' @@ -52,14 +53,16 @@ export function ErrorTrackingScene(): JSX.Element { } return ( - -
    - - - - {selectedIssueIds.length === 0 ? : } - - + + +
    + + + + {selectedIssueIds.length === 0 ? : } + + + ) } diff --git a/frontend/src/scenes/error-tracking/errorTrackingLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingLogic.ts index c1a847a8ab647..260e4e4d08ccb 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingLogic.ts @@ -27,7 +27,9 @@ const customOptions: Record = { all: [lastYear, lastMonth, lastDay], } -const DEFAULT_FILTER_GROUP = { +export const DEFAULT_ERROR_TRACKING_DATE_RANGE = { date_from: '-7d', date_to: null } + +export const DEFAULT_ERROR_TRACKING_FILTER_GROUP = { type: FilterLogicalOperator.And, values: [{ type: FilterLogicalOperator.And, values: [] }], } @@ -42,15 +44,15 @@ export const errorTrackingLogic = kea([ actions({ setDateRange: (dateRange: DateRange) => ({ dateRange }), setAssignee: (assignee: number | null) => ({ assignee }), + setSearchQuery: (searchQuery: string) => ({ searchQuery }), setFilterGroup: (filterGroup: UniversalFiltersGroup) => ({ filterGroup }), setFilterTestAccounts: (filterTestAccounts: boolean) => ({ filterTestAccounts }), - setSearchQuery: (searchQuery: string) => ({ searchQuery }), setSparklineSelectedPeriod: (period: string | null) => ({ period }), _setSparklineOptions: (options: SparklineOption[]) => ({ options }), }), reducers({ dateRange: [ - { date_from: '-7d', date_to: null } as DateRange, + DEFAULT_ERROR_TRACKING_DATE_RANGE as DateRange, { persist: true }, { setDateRange: (_, { dateRange }) => dateRange, @@ -64,7 +66,7 @@ export const errorTrackingLogic = kea([ }, ], filterGroup: [ - DEFAULT_FILTER_GROUP as UniversalFiltersGroup, + DEFAULT_ERROR_TRACKING_FILTER_GROUP as UniversalFiltersGroup, { persist: true }, { setFilterGroup: (_, { filterGroup }) => filterGroup, diff --git a/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts index bd36ead868256..92dbd6d61dcac 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts @@ -1,9 +1,17 @@ +import equal from 'fast-deep-equal' import { actions, connect, kea, path, reducers, selectors } from 'kea' +import { actionToUrl, router, urlToAction } from 'kea-router' import { subscriptions } from 'kea-subscriptions' +import { objectsEqual } from 'lib/utils' +import { Params } from 'scenes/sceneTypes' import { DataTableNode, ErrorTrackingQuery } from '~/queries/schema' -import { errorTrackingLogic } from './errorTrackingLogic' +import { + DEFAULT_ERROR_TRACKING_DATE_RANGE, + DEFAULT_ERROR_TRACKING_FILTER_GROUP, + errorTrackingLogic, +} from './errorTrackingLogic' import type { errorTrackingSceneLogicType } from './errorTrackingSceneLogicType' import { errorTrackingQuery } from './queries' @@ -23,6 +31,10 @@ export const errorTrackingSceneLogic = kea([ 'hasGroupActions', ], ], + actions: [ + errorTrackingLogic, + ['setAssignee', 'setDateRange', 'setFilterGroup', 'setSearchQuery', 'setFilterTestAccounts'], + ], }), actions({ @@ -86,4 +98,79 @@ export const errorTrackingSceneLogic = kea([ subscriptions(({ actions }) => ({ query: () => actions.setSelectedIssueIds([]), })), + + actionToUrl(({ values }) => { + const buildURL = (): [ + string, + Params, + Record, + { + replace: boolean + } + ] => { + const searchParams: Params = { + orderBy: values.orderBy, + filterTestAccounts: values.filterTestAccounts, + } + + if (values.assignee) { + searchParams.assignee = values.assignee + } + if (values.searchQuery) { + 
searchParams.searchQuery = values.searchQuery + } + if (!objectsEqual(values.filterGroup, DEFAULT_ERROR_TRACKING_FILTER_GROUP)) { + searchParams.filterGroup = values.filterGroup + } + if (!objectsEqual(values.dateRange, DEFAULT_ERROR_TRACKING_DATE_RANGE)) { + searchParams.dateRange = values.dateRange + } + + if (!objectsEqual(searchParams, router.values.searchParams)) { + return [router.values.location.pathname, searchParams, router.values.hashParams, { replace: true }] + } + + return [ + router.values.location.pathname, + router.values.searchParams, + router.values.hashParams, + { replace: false }, + ] + } + + return { + setOrderBy: () => buildURL(), + setAssignee: () => buildURL(), + setDateRange: () => buildURL(), + setFilterGroup: () => buildURL(), + setSearchQuery: () => buildURL(), + setFilterTestAccounts: () => buildURL(), + } + }), + + urlToAction(({ actions, values }) => { + const urlToAction = (_: any, params: Params): void => { + if (params.orderBy && !equal(params.orderBy, values.orderBy)) { + actions.setOrderBy(params.orderBy) + } + if (params.dateRange && !equal(params.dateRange, values.dateRange)) { + actions.setDateRange(params.dateRange) + } + if (params.filterGroup && !equal(params.filterGroup, values.filterGroup)) { + actions.setFilterGroup(params.filterGroup) + } + if (params.filterTestAccounts && !equal(params.filterTestAccounts, values.filterTestAccounts)) { + actions.setFilterTestAccounts(params.filterTestAccounts) + } + if (params.assignee && !equal(params.assignee, values.assignee)) { + actions.setAssignee(params.assignee) + } + if (params.searchQuery && !equal(params.searchQuery, values.searchQuery)) { + actions.setSearchQuery(params.searchQuery) + } + } + return { + '*': urlToAction, + } + }), ]) diff --git a/frontend/src/scenes/experiments/ExperimentForm.tsx b/frontend/src/scenes/experiments/ExperimentForm.tsx index d1fd0b140bc4d..125fb2320ddab 100644 --- a/frontend/src/scenes/experiments/ExperimentForm.tsx +++ b/frontend/src/scenes/experiments/ExperimentForm.tsx @@ -6,6 +6,7 @@ import { useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' import { ExperimentVariantNumber } from 'lib/components/SeriesGlyph' import { MAX_EXPERIMENT_VARIANTS } from 'lib/constants' +import { groupsAccessLogic, GroupsAccessStatus } from 'lib/introductions/groupsAccessLogic' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonRadio } from 'lib/lemon-ui/LemonRadio' @@ -20,6 +21,7 @@ const ExperimentFormFields = (): JSX.Element => { const { addExperimentGroup, removeExperimentGroup, setExperiment, createExperiment, setExperimentType } = useActions(experimentLogic) const { webExperimentsAvailable } = useValues(experimentsLogic) + const { groupsAccessStatus } = useValues(groupsAccessLogic) return (
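Stepping back to the `errorTrackingSceneLogic` change above: it wires filter state into the URL with kea-router, replacing the current history entry when the serialized params actually differ (so tweaking filters does not flood the back button), and re-hydrating state in `urlToAction` when the URL changes. The same pattern reduced to a single value, as a sketch with illustrative names:

import { actions, kea, path, reducers } from 'kea'
import { actionToUrl, router, urlToAction } from 'kea-router'

export const filterSyncLogic = kea([
    path(['scenes', 'example', 'filterSyncLogic']),
    actions({ setSearchQuery: (searchQuery: string) => ({ searchQuery }) }),
    reducers({ searchQuery: ['', { setSearchQuery: (_, { searchQuery }) => searchQuery }] }),
    // State -> URL: mirror the value into the query string, replacing the
    // current history entry so every keystroke doesn't become a Back stop.
    actionToUrl(({ values }) => ({
        setSearchQuery: () => [
            router.values.location.pathname,
            { ...router.values.searchParams, searchQuery: values.searchQuery || undefined },
            router.values.hashParams,
            { replace: true },
        ],
    })),
    // URL -> state: re-hydrate on navigation, guarding against update loops by
    // only dispatching when the URL value actually differs from current state.
    urlToAction(({ actions, values }) => ({
        '*': (_: any, searchParams: Record<string, any>): void => {
            if (searchParams.searchQuery && searchParams.searchQuery !== values.searchQuery) {
                actions.setSearchQuery(searchParams.searchQuery)
            }
        },
    })),
])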
    @@ -103,37 +105,40 @@ const ExperimentFormFields = (): JSX.Element => { />
    )} -
    -

    Participant type

    -
    - The type on which to aggregate metrics. You can change this at any time during the experiment. -
    - - { - const groupTypeIndex = rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined + {groupsAccessStatus === GroupsAccessStatus.AlreadyUsing && ( +
    +

    Participant type

    +
    + The type on which to aggregate metrics. You can change this at any time during the + experiment. +
    + + { + const groupTypeIndex = rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined - setExperiment({ - parameters: { - ...experiment.parameters, - aggregation_group_type_index: groupTypeIndex ?? undefined, - }, - }) - }} - options={[ - { value: -1, label: 'Persons' }, - ...Array.from(groupTypes.values()).map((groupType) => ({ - value: groupType.group_type_index, - label: capitalizeFirstLetter(aggregationLabel(groupType.group_type_index).plural), - })), - ]} - /> -
    + setExperiment({ + parameters: { + ...experiment.parameters, + aggregation_group_type_index: groupTypeIndex ?? undefined, + }, + }) + }} + options={[ + { value: -1, label: 'Persons' }, + ...Array.from(groupTypes.values()).map((groupType) => ({ + value: groupType.group_type_index, + label: capitalizeFirstLetter(aggregationLabel(groupType.group_type_index).plural), + })), + ]} + /> +
    + )}
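The hunk above gates the participant-type picker on `GroupsAccessStatus.AlreadyUsing`, so only teams already using group analytics can pick a group-level aggregation; everyone else keeps the person-level default. The shape of the gate, isolated into a sketch (the component name is hypothetical):

import { useValues } from 'kea'
import { groupsAccessLogic, GroupsAccessStatus } from 'lib/introductions/groupsAccessLogic'

function ParticipantTypeSection(): JSX.Element | null {
    const { groupsAccessStatus } = useValues(groupsAccessLogic)
    // Teams that aren't already using group analytics never see the picker,
    // so aggregation_group_type_index stays undefined (aggregate by persons).
    if (groupsAccessStatus !== GroupsAccessStatus.AlreadyUsing) {
        return null
    }
    return <div>{/* LemonRadio with 'Persons' plus one option per group type, as in the hunk above */}</div>
}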

    Variants

    Add up to 9 variants to test against your control.
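As an aside on the copy above, the cap is presumably enforced via the `MAX_EXPERIMENT_VARIANTS` constant this file imports from `lib/constants`; the exact value of that constant is not shown in this diff. A hypothetical guard might look like:

import { MAX_EXPERIMENT_VARIANTS } from 'lib/constants'

// Disallow adding test variants beyond the configured cap.
const canAddVariant = (variants: { key: string }[]): boolean => variants.length < MAX_EXPERIMENT_VARIANTS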
    diff --git a/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx b/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx index 575eb84c52708..7f4378a7ec5ef 100644 --- a/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/CumulativeExposuresChart.tsx @@ -1,28 +1,16 @@ import { IconInfo } from '@posthog/icons' import { Tooltip } from '@posthog/lemon-ui' import { useValues } from 'kea' -import { FEATURE_FLAGS } from 'lib/constants' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { InsightEmptyState } from 'scenes/insights/EmptyStates' import { InsightViz } from '~/queries/nodes/InsightViz/InsightViz' -import { queryFromFilters } from '~/queries/nodes/InsightViz/utils' import { CachedExperimentTrendsQueryResponse, InsightQueryNode, InsightVizNode, NodeKind } from '~/queries/schema' -import { - _TrendsExperimentResults, - BaseMathType, - ChartDisplayType, - InsightType, - PropertyFilterType, - PropertyOperator, -} from '~/types' +import { BaseMathType, ChartDisplayType, InsightType, PropertyFilterType, PropertyOperator } from '~/types' import { experimentLogic } from '../experimentLogic' -import { transformResultFilters } from '../utils' export function CumulativeExposuresChart(): JSX.Element { const { experiment, experimentResults, getMetricType } = useValues(experimentLogic) - const { featureFlags } = useValues(featureFlagLogic) const metricIdx = 0 const metricType = getMetricType(metricIdx) @@ -32,99 +20,52 @@ export function CumulativeExposuresChart(): JSX.Element { variants.push(`holdout-${experiment.holdout.id}`) } - let query + let query: InsightVizNode - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - if (metricType === InsightType.TRENDS) { - query = { - kind: NodeKind.InsightVizNode, - source: (experimentResults as CachedExperimentTrendsQueryResponse).exposure_query, - } - } else { - query = { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - dateRange: { - date_from: experiment.start_date, - date_to: experiment.end_date, - }, - interval: 'day', - trendsFilter: { - display: ChartDisplayType.ActionsLineGraphCumulative, - showLegend: false, - smoothingIntervals: 1, - }, - series: [ - { - kind: NodeKind.EventsNode, - event: experiment.filters?.events?.[0]?.name, - math: BaseMathType.UniqueUsers, - properties: [ - { - key: `$feature/${experiment.feature_flag_key}`, - value: variants, - operator: PropertyOperator.Exact, - type: PropertyFilterType.Event, - }, - ], - }, - ], - breakdownFilter: { - breakdown: `$feature/${experiment.feature_flag_key}`, - breakdown_type: 'event', - }, - }, - } + if (metricType === InsightType.TRENDS) { + query = { + kind: NodeKind.InsightVizNode, + source: (experimentResults as CachedExperimentTrendsQueryResponse)?.exposure_query || { + kind: NodeKind.TrendsQuery, + series: [], + interval: 'day', + }, } } else { - if (metricType === InsightType.TRENDS && experiment.parameters?.custom_exposure_filter) { - const trendResults = experimentResults as _TrendsExperimentResults - const queryFilters = { - ...trendResults.exposure_filters, - display: ChartDisplayType.ActionsLineGraphCumulative, - } as _TrendsExperimentResults['exposure_filters'] - query = queryFromFilters(transformResultFilters(queryFilters)) - } else { - query = { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - dateRange: { - date_from: experiment.start_date, 
- date_to: experiment.end_date, - }, - interval: 'day', - trendsFilter: { - display: ChartDisplayType.ActionsLineGraphCumulative, - showLegend: false, - smoothingIntervals: 1, - }, - series: [ - { - kind: NodeKind.EventsNode, - event: - metricType === InsightType.TRENDS - ? '$feature_flag_called' - : experiment.filters?.events?.[0]?.name, - math: BaseMathType.UniqueUsers, - properties: [ - { - key: `$feature/${experiment.feature_flag_key}`, - value: variants, - operator: PropertyOperator.Exact, - type: PropertyFilterType.Event, - }, - ], - }, - ], - breakdownFilter: { - breakdown: `$feature/${experiment.feature_flag_key}`, - breakdown_type: 'event', + query = { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.TrendsQuery, + dateRange: { + date_from: experiment.start_date, + date_to: experiment.end_date, + }, + interval: 'day', + trendsFilter: { + display: ChartDisplayType.ActionsLineGraphCumulative, + showLegend: false, + smoothingIntervals: 1, + }, + series: [ + { + kind: NodeKind.EventsNode, + event: experiment.filters?.events?.[0]?.name, + math: BaseMathType.UniqueUsers, + properties: [ + { + key: `$feature/${experiment.feature_flag_key}`, + value: variants, + operator: PropertyOperator.Exact, + type: PropertyFilterType.Event, + }, + ], }, + ], + breakdownFilter: { + breakdown: `$feature/${experiment.feature_flag_key}`, + breakdown_type: 'event', }, - } + }, } } @@ -139,7 +80,7 @@ export function CumulativeExposuresChart(): JSX.Element { {experiment.start_date ? ( ), + ...query, showTable: true, }} setQuery={() => {}} diff --git a/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx b/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx index 95938242c143d..e7797f03de7ba 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx @@ -3,10 +3,10 @@ import { LemonBanner, LemonInput, Link, Tooltip } from '@posthog/lemon-ui' import { BindLogic, useActions, useValues } from 'kea' import { LemonSlider } from 'lib/lemon-ui/LemonSlider' import { humanFriendlyNumber } from 'lib/utils' -import { insightDataLogic } from 'scenes/insights/insightDataLogic' import { insightLogic } from 'scenes/insights/insightLogic' import { Query } from '~/queries/Query/Query' +import { ExperimentFunnelsQuery, ExperimentTrendsQuery, NodeKind } from '~/queries/schema' import { ExperimentIdType, InsightType } from '~/types' import { MetricInsightId } from '../constants' @@ -120,7 +120,16 @@ export function DataCollectionCalculator({ experimentId }: ExperimentCalculatorP syncWithUrl: false, }) const { insightProps } = useValues(insightLogicInstance) - const { query } = useValues(insightDataLogic(insightProps)) + let query = null + if (experiment.metrics.length > 0) { + query = { + kind: NodeKind.InsightVizNode, + source: + metricType === InsightType.FUNNELS + ? 
(experiment.metrics[0] as ExperimentFunnelsQuery).funnels_query + : (experiment.metrics[0] as ExperimentTrendsQuery).count_query, + } + } const funnelConversionRate = conversionMetrics?.totalRate * 100 || 0 diff --git a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx index 30d6b93ea8278..5ebf192769a2d 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx @@ -64,7 +64,12 @@ export function DistributionModal({ experimentId }: { experimentId: Experiment[' { saveSidebarExperimentFeatureFlag(featureFlag) - updateExperiment({ holdout_id: experiment.holdout_id }) + updateExperiment({ + holdout_id: experiment.holdout_id, + parameters: { + feature_flag_variants: featureFlag?.filters?.multivariate?.variants ?? [], + }, + }) closeDistributionModal() }} type="primary" diff --git a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx index 867d82bc83a37..59766e480529f 100644 --- a/frontend/src/scenes/experiments/ExperimentView/Goal.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/Goal.tsx @@ -250,6 +250,12 @@ export function Goal(): JSX.Element { const [isModalOpen, setIsModalOpen] = useState(false) const metricType = getMetricType(0) + // :FLAG: CLEAN UP AFTER MIGRATION + const isDataWarehouseMetric = + featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL] && + metricType === InsightType.TRENDS && + (experiment.metrics[0] as ExperimentTrendsQuery).count_query?.series[0].kind === NodeKind.DataWarehouseNode + return (
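The `DataCollectionCalculator` change above derives the preview insight from the experiment's first metric rather than from `insightDataLogic`. A sketch of that mapping, assuming the metric nodes are discriminated by `kind` as elsewhere in the schema:

import { ExperimentFunnelsQuery, ExperimentTrendsQuery, InsightVizNode, NodeKind } from '~/queries/schema'

// Wrap the experiment's first metric in an InsightVizNode: funnels metrics
// carry their insight in `funnels_query`, trends metrics in `count_query`.
function queryForMetric(metric: ExperimentFunnelsQuery | ExperimentTrendsQuery): InsightVizNode {
    return {
        kind: NodeKind.InsightVizNode,
        source:
            metric.kind === NodeKind.ExperimentFunnelsQuery ? metric.funnels_query : metric.count_query,
    }
}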
    @@ -322,16 +328,18 @@ export function Goal(): JSX.Element { Change goal
    - {metricType === InsightType.TRENDS && !experimentMathAggregationForTrends() && ( - <> - -
    -
    - + {metricType === InsightType.TRENDS && + !experimentMathAggregationForTrends() && + !isDataWarehouseMetric && ( + <> + +
    +
    + +
    -
    - - )} + + )}
    )} = [ { @@ -149,16 +136,18 @@ export function SecondaryMetricsTable({ experimentId }: { experimentId: Experime ), render: function Key(_, item: TabularSecondaryMetricResults): JSX.Element { const { variant } = item - return
    {targetResults ? countDataForVariant(targetResults, variant) : '—'}
    + const count = targetResults ? countDataForVariant(targetResults, variant) : null + return
    {count === null ? '—' : humanFriendlyNumber(count)}
    }, }, { title: 'Exposure', render: function Key(_, item: TabularSecondaryMetricResults): JSX.Element { const { variant } = item - return ( -
    {targetResults ? exposureCountDataForVariant(targetResults, variant) : '—'}
    - ) + const exposureCount = targetResults + ? exposureCountDataForVariant(targetResults, variant) + : null + return
    {exposureCount === null ? '—' : humanFriendlyNumber(exposureCount)}
    }, }, { @@ -337,7 +326,7 @@ const AddSecondaryMetricButton = ({ metrics: any openEditModal: (metricIdx: number) => void }): JSX.Element => { - const { experiment, featureFlags } = useValues(experimentLogic({ experimentId })) + const { experiment } = useValues(experimentLogic({ experimentId })) const { setExperiment } = useActions(experimentLogic({ experimentId })) return ( { - // :FLAG: CLEAN UP AFTER MIGRATION - if (featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOGQL]) { - const newMetricsSecondary = [...experiment.metrics_secondary, getDefaultFunnelsMetric()] - setExperiment({ - metrics_secondary: newMetricsSecondary, - }) - openEditModal(newMetricsSecondary.length - 1) - } else { - const newSecondaryMetrics = [ - ...experiment.secondary_metrics, - { - name: '', - filters: getDefaultFilters(InsightType.FUNNELS, undefined), - }, - ] - setExperiment({ - secondary_metrics: newSecondaryMetrics, - }) - openEditModal(newSecondaryMetrics.length - 1) - } + const newMetricsSecondary = [...experiment.metrics_secondary, getDefaultFunnelsMetric()] + setExperiment({ + metrics_secondary: newMetricsSecondary, + }) + openEditModal(newMetricsSecondary.length - 1) }} disabledReason={ metrics.length >= MAX_SECONDARY_METRICS diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx index c24376ac7e67c..df8580fee68dd 100644 --- a/frontend/src/scenes/experiments/ExperimentView/components.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -493,44 +493,38 @@ export function PageHeaderCustom(): JSX.Element { )} {experiment && isExperimentRunning && (
    - {!isExperimentStopped && !experiment.archived && ( - <> - - - exposureCohortId ? undefined : createExposureCohort() - } - fullWidth - data-attr={`${ - exposureCohortId ? 'view' : 'create' - }-exposure-cohort`} - to={exposureCohortId ? urls.cohort(exposureCohortId) : undefined} - targetBlank={!!exposureCohortId} - > - {exposureCohortId ? 'View' : 'Create'} exposure cohort - - loadExperimentResults(true)} - fullWidth - data-attr="refresh-experiment" - > - Refresh experiment results - - loadSecondaryMetricResults(true)} - fullWidth - data-attr="refresh-secondary-metrics" - > - Refresh secondary metrics - - - } - /> - - - )} + <> + + (exposureCohortId ? undefined : createExposureCohort())} + fullWidth + data-attr={`${exposureCohortId ? 'view' : 'create'}-exposure-cohort`} + to={exposureCohortId ? urls.cohort(exposureCohortId) : undefined} + targetBlank={!!exposureCohortId} + > + {exposureCohortId ? 'View' : 'Create'} exposure cohort + + loadExperimentResults(true)} + fullWidth + data-attr="refresh-experiment" + > + Refresh experiment results + + loadSecondaryMetricResults(true)} + fullWidth + data-attr="refresh-secondary-metrics" + > + Refresh secondary metrics + + + } + /> + + {!experiment.end_date && ( ([ ...values.experiment.metrics[0], experiment_id: values.experimentId, } + if ( + queryWithExperimentId.kind === NodeKind.ExperimentTrendsQuery && + values.featureFlags[FEATURE_FLAGS.EXPERIMENT_STATS_V2] + ) { + queryWithExperimentId.stats_version = 2 + } const response = await performQuery(queryWithExperimentId, undefined, refresh) diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index f673e27c4b401..d7e2ad01c9133 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -70,6 +70,7 @@ import FeatureFlagProjects from './FeatureFlagProjects' import { FeatureFlagReleaseConditions } from './FeatureFlagReleaseConditions' import FeatureFlagSchedule from './FeatureFlagSchedule' import { featureFlagsLogic, FeatureFlagsTab } from './featureFlagsLogic' +import { FeatureFlagStatusIndicator } from './FeatureFlagStatusIndicator' import { RecentFeatureFlagInsights } from './RecentFeatureFlagInsightsCard' export const scene: SceneExport = { @@ -382,7 +383,8 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element {
    If your feature flag is applied before identifying the user, use this to ensure that the flag value remains consistent for the same user. - Depending on your setup, this option might not always be suitable.{' '} + Depending on your setup, this option might not always be suitable. This + feature requires creating profiles for anonymous users.{' '} ) : ( - { - LemonDialog.open({ - title: `${newValue === true ? 'Enable' : 'Disable'} this flag?`, - description: `This flag will be immediately ${ - newValue === true ? 'rolled out to' : 'rolled back from' - } the users matching the release conditions.`, - primaryButton: { - children: 'Confirm', - type: 'primary', - onClick: () => { - const updatedFlag = { ...featureFlag, active: newValue } - setFeatureFlag(updatedFlag) - saveFeatureFlag(updatedFlag) +
    + { + LemonDialog.open({ + title: `${newValue === true ? 'Enable' : 'Disable'} this flag?`, + description: `This flag will be immediately ${ + newValue === true ? 'rolled out to' : 'rolled back from' + } the users matching the release conditions.`, + primaryButton: { + children: 'Confirm', + type: 'primary', + onClick: () => { + const updatedFlag = { ...featureFlag, active: newValue } + setFeatureFlag(updatedFlag) + saveFeatureFlag(updatedFlag) + }, + size: 'small', }, - size: 'small', - }, - secondaryButton: { - children: 'Cancel', - type: 'tertiary', - size: 'small', - }, - }) - }} - label="Enabled" - disabledReason={ - !featureFlag.can_edit - ? "You only have view access to this feature flag. To make changes, contact the flag's creator." - : null - } - checked={featureFlag.active} - /> + secondaryButton: { + children: 'Cancel', + type: 'tertiary', + size: 'small', + }, + }) + }} + label="Enabled" + disabledReason={ + !featureFlag.can_edit + ? "You only have view access to this feature flag. To make changes, contact the flag's creator." + : null + } + checked={featureFlag.active} + /> + +
    )}
    diff --git a/frontend/src/scenes/feature-flags/FeatureFlagStatusIndicator.tsx b/frontend/src/scenes/feature-flags/FeatureFlagStatusIndicator.tsx new file mode 100644 index 0000000000000..7b2ee5fc7fa14 --- /dev/null +++ b/frontend/src/scenes/feature-flags/FeatureFlagStatusIndicator.tsx @@ -0,0 +1,45 @@ +import { LemonTag } from 'lib/lemon-ui/LemonTag' +import { Tooltip } from 'lib/lemon-ui/Tooltip' + +import { FeatureFlagStatus, FeatureFlagStatusResponse } from '~/types' + +export function FeatureFlagStatusIndicator({ + flagStatus, +}: { + flagStatus: FeatureFlagStatusResponse | null +}): JSX.Element | null { + if ( + !flagStatus || + [ + FeatureFlagStatus.ACTIVE, + FeatureFlagStatus.INACTIVE, + FeatureFlagStatus.DELETED, + FeatureFlagStatus.UNKNOWN, + ].includes(flagStatus.status) + ) { + return null + } + + return ( + +
    {flagStatus.reason}
    +
    + {flagStatus.status === FeatureFlagStatus.STALE && + 'Make sure to remove any references to this flag in your code before deleting it.'} + {flagStatus.status === FeatureFlagStatus.INACTIVE && + 'It is probably not being used in your code, but be sure to remove any references to this flag before deleting it.'} +
    + + } + placement="right" + > + + + {flagStatus.status} + + +
    + ) +} diff --git a/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx b/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx index acf32b9788ed5..b929e2d203f19 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx @@ -9,6 +9,7 @@ import { mswDecorator } from '~/mocks/browser' import featureFlags from './__mocks__/feature_flags.json' const meta: Meta = { + tags: ['ff'], title: 'Scenes-App/Feature Flags', parameters: { layout: 'fullscreen', @@ -33,6 +34,13 @@ const meta: Meta = { 200, featureFlags.results.find((r) => r.id === Number(req.params['flagId'])), ], + '/api/projects/:team_id/feature_flags/:flagId/status': () => [ + 200, + { + status: 'active', + reason: 'Feature flag is active', + }, + ], }, post: { '/api/environments/:team_id/query': {}, diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 3b4f69787fc40..978348e795149 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -33,6 +33,7 @@ import { EarlyAccessFeatureType, FeatureFlagGroupType, FeatureFlagRollbackConditions, + FeatureFlagStatusResponse, FeatureFlagType, FilterLogicalOperator, FilterType, @@ -755,6 +756,18 @@ export const featureFlagLogic = kea([ } }, }, + flagStatus: [ + null as FeatureFlagStatusResponse | null, + { + loadFeatureFlagStatus: () => { + const { currentTeamId } = values + if (currentTeamId && props.id && props.id !== 'new' && props.id !== 'link') { + return api.featureFlags.getStatus(currentTeamId, props.id) + } + return null + }, + }, + ], })), listeners(({ actions, values, props }) => ({ submitNewDashboardSuccessWithResult: async ({ result }) => { @@ -1040,8 +1053,10 @@ export const featureFlagLogic = kea([ actions.setFeatureFlag(formatPayloadsWithFlag) actions.loadRelatedInsights() actions.loadAllInsightsForFlag() + actions.loadFeatureFlagStatus() } else if (props.id !== 'new') { actions.loadFeatureFlag() + actions.loadFeatureFlagStatus() } }), ]) diff --git a/frontend/src/scenes/funnels/FunnelHistogram.tsx b/frontend/src/scenes/funnels/FunnelHistogram.tsx index 1709b0eb514b4..39a157b7b878e 100644 --- a/frontend/src/scenes/funnels/FunnelHistogram.tsx +++ b/frontend/src/scenes/funnels/FunnelHistogram.tsx @@ -29,6 +29,7 @@ export function FunnelHistogram(): JSX.Element | null {
    ([ if ( // TODO: Ideally we don't check filters anymore, but tests are still using this insightData?.filters?.insight !== InsightType.FUNNELS && + querySource && querySource?.kind !== NodeKind.FunnelsQuery ) { return [] @@ -275,6 +276,7 @@ export const funnelDataLogic = kea([ if ( // TODO: Ideally we don't check filters anymore, but tests are still using this insightData?.filters?.insight !== InsightType.FUNNELS && + querySource && querySource?.kind !== NodeKind.FunnelsQuery ) { return false diff --git a/frontend/src/scenes/groups/Group.tsx b/frontend/src/scenes/groups/Group.tsx index ccd14aec76ce2..0aaf07f9a9cde 100644 --- a/frontend/src/scenes/groups/Group.tsx +++ b/frontend/src/scenes/groups/Group.tsx @@ -6,11 +6,13 @@ import { PageHeader } from 'lib/components/PageHeader' import { PropertiesTable } from 'lib/components/PropertiesTable' import { TZLabel } from 'lib/components/TZLabel' import { isEventFilter } from 'lib/components/UniversalFilters/utils' +import { FEATURE_FLAGS } from 'lib/constants' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { LemonTabs } from 'lib/lemon-ui/LemonTabs' import { lemonToast } from 'lib/lemon-ui/LemonToast' import { Link } from 'lib/lemon-ui/Link' import { Spinner, SpinnerOverlay } from 'lib/lemon-ui/Spinner/Spinner' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { GroupDashboard } from 'scenes/groups/GroupDashboard' import { groupLogic, GroupLogicProps } from 'scenes/groups/groupLogic' import { RelatedGroups } from 'scenes/groups/RelatedGroups' @@ -86,11 +88,14 @@ export function Group(): JSX.Element { const { groupKey, groupTypeIndex } = logicProps const { setGroupEventsQuery } = useActions(groupLogic) const { currentTeam } = useValues(teamLogic) + const { featureFlags } = useValues(featureFlagLogic) if (!groupData || !groupType) { return groupDataLoading ? : } + const settingLevel = featureFlags[FEATURE_FLAGS.ENVIRONMENTS] ? 'environment' : 'project' + return ( <> - Session recordings are currently disabled for this project. To use this - feature, please go to your{' '} + Session recordings are currently disabled for this {settingLevel}. To use + this feature, please go to your{' '} project settings{' '} and enable it. 
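On the `funnelDataLogic` change above: the added `querySource &&` matters because when `querySource` is undefined, `querySource?.kind !== NodeKind.FunnelsQuery` evaluates to true, so the selectors bailed out with empty results before any query had even loaded. The guard, isolated (helper name illustrative):

import { NodeKind } from '~/queries/schema'

// Old: `querySource?.kind !== NodeKind.FunnelsQuery` is true for an undefined
// querySource (undefined !== FunnelsQuery), so the selector short-circuited.
// New: require a loaded querySource before treating it as a non-funnel query.
const isLoadedNonFunnelQuery = (querySource?: { kind: NodeKind }): boolean =>
    !!querySource && querySource.kind !== NodeKind.FunnelsQuery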
diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss b/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss index ddae1edbb3113..de2ede18a2fb8 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss +++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.scss @@ -38,7 +38,9 @@ } h2 { + width: 100%; text-align: center; + word-wrap: break-word; } ol { diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx index 06f0928dc54f3..da29f417bf1f3 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx +++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx @@ -31,7 +31,7 @@ import { urls } from 'scenes/urls' import { actionsAndEventsToSeries } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' import { seriesToActionsAndEvents } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' -import { FunnelsQuery, Node } from '~/queries/schema' +import { FunnelsQuery, Node, QueryStatus } from '~/queries/schema' import { FilterType, InsightLogicProps, SavedInsightsTabs } from '~/types' import { samplingFilterLogic } from '../EditorFilters/samplingFilterLogic' @@ -82,25 +82,22 @@ function humanFileSize(size: number): string { return (+(size / Math.pow(1024, i))).toFixed(2) + ' ' + ['B', 'kB', 'MB', 'GB', 'TB'][i] } -export function InsightLoadingState({ +export function StatelessInsightLoadingState({ queryId, - insightProps, + pollResponse, + suggestion, }: { queryId?: string | null - insightProps: InsightLogicProps + pollResponse?: Record | null + suggestion?: JSX.Element }): JSX.Element { - const { suggestedSamplingPercentage, samplingPercentage } = useValues(samplingFilterLogic(insightProps)) - const { insightPollResponse } = useValues(insightDataLogic(insightProps)) - - const { currentTeam } = useValues(teamLogic) - const [rowsRead, setRowsRead] = useState(0) const [bytesRead, setBytesRead] = useState(0) const [secondsElapsed, setSecondsElapsed] = useState(0) useEffect(() => { - const status = insightPollResponse?.status?.query_progress - const previousStatus = insightPollResponse?.previousStatus?.query_progress + const status = pollResponse?.status?.query_progress + const previousStatus = pollResponse?.previousStatus?.query_progress setRowsRead(previousStatus?.rows_read || 0) setBytesRead(previousStatus?.bytes_read || 0) const interval = setInterval(() => { @@ -113,21 +110,21 @@ export function InsightLoadingState({ return Math.min(bytesRead + diff / 30, status?.bytes_read || 0) }) setSecondsElapsed(() => { - return dayjs().diff(dayjs(insightPollResponse?.status?.start_time), 'milliseconds') + return dayjs().diff(dayjs(pollResponse?.status?.start_time), 'milliseconds') }) }, 100) return () => clearInterval(interval) - }, [insightPollResponse]) + }, [pollResponse]) + const bytesPerSecond = (bytesRead / (secondsElapsed || 1)) * 1000 - const estimatedRows = insightPollResponse?.status?.query_progress?.estimated_rows_total + const estimatedRows = pollResponse?.status?.query_progress?.estimated_rows_total const cpuUtilization = - (insightPollResponse?.status?.query_progress?.active_cpu_time || 0) / - (insightPollResponse?.status?.query_progress?.time_elapsed || 1) / + (pollResponse?.status?.query_progress?.active_cpu_time || 0) / + (pollResponse?.status?.query_progress?.time_elapsed || 1) / 10000 - currentTeam?.modifiers?.personsOnEventsMode ?? currentTeam?.default_modifiers?.personsOnEventsMode ?? 'disabled' return (
@@ -148,37 +145,14 @@ export function InsightLoadingState({
                 )}
             </div>

-            <div className="flex items-center m-auto">
-                {currentTeam?.modifiers?.personsOnEventsMode === 'person_id_override_properties_joined' ? (
-                    <>
-                        <IconInfo className="mr-2 text-xl shrink-0" />
-                        <p className="m-0">
-                            You can speed this query up by changing the{' '}
-                            <Link to={urls.settings('project-product-analytics')}>person properties mode</Link> setting.
-                        </p>
-                    </>
-                ) : (
-                    <>
-                        <IconInfo className="mr-2 text-xl shrink-0" />
-                        <p className="m-0">
-                            {suggestedSamplingPercentage && !samplingPercentage ? (
-                                <span>
-                                    Need to speed things up? Try reducing the date range, removing breakdowns, or
-                                    turning on <SamplingLink insightProps={insightProps} />.
-                                </span>
-                            ) : suggestedSamplingPercentage && samplingPercentage ? (
-                                <>
-                                    Still waiting around? You must have lots of data! Kick it up a notch with{' '}
-                                    <SamplingLink insightProps={insightProps} />. Or try reducing the date range and
-                                    removing breakdowns.
-                                </>
-                            ) : (
-                                <>Need to speed things up? Try reducing the date range or removing breakdowns.</>
-                            )}
-                        </p>
-                    </>
-                )}
-            </div>
+            {suggestion ? (
+                suggestion
+            ) : (
+                <div className="flex items-center m-auto">
+                    <IconInfo className="mr-2 text-xl shrink-0" />
+                    <p className="m-0">Need to speed things up? Try reducing the date range.</p>
+                </div>
+            )}
             {queryId ? (
                 <div className="text-muted text-xs m-auto">
                     Query ID: <span className="font-mono">{queryId}</span>
@@ -189,6 +163,66 @@ export function InsightLoadingState({
         </div>
     )
 }

+export function InsightLoadingState({
+    queryId,
+    insightProps,
+}: {
+    queryId?: string | null
+    insightProps: InsightLogicProps
+}): JSX.Element {
+    const { suggestedSamplingPercentage, samplingPercentage } = useValues(samplingFilterLogic(insightProps))
+    const { insightPollResponse } = useValues(insightDataLogic(insightProps))
+    const { currentTeam } = useValues(teamLogic)
+
+    const personsOnEventsMode =
+        currentTeam?.modifiers?.personsOnEventsMode ?? currentTeam?.default_modifiers?.personsOnEventsMode ?? 'disabled'
+
+    return (
+        <StatelessInsightLoadingState
+            queryId={queryId}
+            pollResponse={insightPollResponse}
+            suggestion={
+                <div className="flex items-center m-auto">
+                    {personsOnEventsMode === 'person_id_override_properties_joined' ? (
+                        <>
+                            <IconInfo className="mr-2 text-xl shrink-0" />
+                            <p className="m-0">
+                                You can speed this query up by changing the{' '}
+                                <Link to={urls.settings('project-product-analytics')}>person properties mode</Link>{' '}
+                                setting.
+                            </p>
+                        </>
+                    ) : (
+                        <>
+                            <IconInfo className="mr-2 text-xl shrink-0" />
+                            <p className="m-0">
+                                {suggestedSamplingPercentage && !samplingPercentage ? (
+                                    <span>
+                                        Need to speed things up? Try reducing the date range, removing breakdowns,
+                                        or turning on <SamplingLink insightProps={insightProps} />.
+                                    </span>
+                                ) : suggestedSamplingPercentage && samplingPercentage ? (
+                                    <>
+                                        Still waiting around? You must have lots of data! Kick it up a notch with{' '}
+                                        <SamplingLink insightProps={insightProps} />. Or try reducing the date range
+                                        and removing breakdowns.
+                                    </>
+                                ) : (
+                                    <>
+                                        Need to speed things up? Try reducing the date range or removing breakdowns.
+                                    </>
+                                )}
+                            </p>
+                        </>
+                    )}
+                </div>
+            }
+        />
+    )
+}
+
 export function InsightTimeoutState({ queryId }: { queryId?: string | null }): JSX.Element {
     const { openSupportForm } = useActions(supportLogic)
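A note on the refactor above: `InsightLoadingState` is now split into a presentational `StatelessInsightLoadingState` (fed a raw `pollResponse` plus an optional `suggestion` element) and a thin insight-bound wrapper that supplies both from kea logics. A hedged sketch of what this unlocks for a hypothetical non-insight caller; the component and import path come from this diff, but the caller, its props, and the loose `pollResponse` typing are assumptions:

    // Hypothetical reuse, not in this PR: any surface that polls a query can
    // now render the shared loading UI by handing over its own poll response.
    import { StatelessInsightLoadingState } from 'scenes/insights/EmptyStates/EmptyStates'

    export function HogQLEditorLoadingState({
        queryId,
        pollResponse,
    }: {
        queryId: string | null
        pollResponse: Record<string, any> | null // shape assumed from the fields read above
    }): JSX.Element {
        // Omitting `suggestion` falls back to the generic
        // "Need to speed things up? Try reducing the date range." hint.
        return <StatelessInsightLoadingState queryId={queryId} pollResponse={pollResponse} />
    }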
diff --git a/frontend/src/scenes/insights/insightVizDataLogic.ts b/frontend/src/scenes/insights/insightVizDataLogic.ts
index aec4a1eb32ed8..14b0b4cbd393d 100644
--- a/frontend/src/scenes/insights/insightVizDataLogic.ts
+++ b/frontend/src/scenes/insights/insightVizDataLogic.ts
@@ -100,6 +100,7 @@ export const insightVizDataLogic = kea<insightVizDataLogicType>([
         updateDisplay: (display: ChartDisplayType | undefined) => ({ display }),
         updateHiddenLegendIndexes: (hiddenLegendIndexes: number[] | undefined) => ({ hiddenLegendIndexes }),
         setTimedOutQueryId: (id: string | null) => ({ id }),
+        setIsIntervalManuallySet: (isIntervalManuallySet: boolean) => ({ isIntervalManuallySet }),
     }),

     reducers({
@@ -109,6 +110,18 @@ export const insightVizDataLogic = kea<insightVizDataLogicType>([
                 setTimedOutQueryId: (_, { id }) => id,
             },
         ],
+
+        // Whether the interval has been manually set by the user. If true, prevents auto-adjusting
+        // the interval when the date range changes. Reference: https://github.com/PostHog/posthog/issues/22785
+        isIntervalManuallySet: [
+            false,
+            {
+                updateQuerySource: (state, { querySource }) => {
+                    // If interval is explicitly included in the update, mark it as manually set
+                    return 'interval' in querySource ? true : state
+                },
+                setIsIntervalManuallySet: (_, { isIntervalManuallySet }) => isIntervalManuallySet,
+            },
+        ],
     }),

     selectors({
@@ -332,7 +345,7 @@ export const insightVizDataLogic = kea<insightVizDataLogicType>([
             // We use 512 for query timeouts
             // Async queries put the error message on data.error_message, while synchronous ones use detail
             return insightDataError?.status === 400 || insightDataError?.status === 512
-                ? (insightDataError.detail || insightDataError.data?.error_message)?.replace('Try ', 'Try ') // Add unbreakable space for better line breaking
+                ? (insightDataError.detail || insightDataError.data?.error_message)?.replace('Try ', 'Try\u00a0') // Add unbreakable space for better line breaking
                 : null
         },
     ],
@@ -401,7 +414,11 @@ export const insightVizDataLogic = kea<insightVizDataLogicType>([
                 ...values.query,
                 source: {
                     ...values.querySource,
-                    ...handleQuerySourceUpdateSideEffects(querySource, values.querySource as InsightQueryNode),
+                    ...handleQuerySourceUpdateSideEffects(
+                        querySource,
+                        values.querySource as InsightQueryNode,
+                        values.isIntervalManuallySet
+                    ),
                 },
             } as Node)
         },
@@ -487,7 +504,8 @@ const getActiveUsersMath = (

 const handleQuerySourceUpdateSideEffects = (
     update: QuerySourceUpdate,
-    currentState: InsightQueryNode
+    currentState: InsightQueryNode,
+    isIntervalManuallySet: boolean
 ): QuerySourceUpdate => {
     const mergedUpdate = { ...update } as InsightQueryNode
@@ -536,7 +554,8 @@ const handleQuerySourceUpdateSideEffects = (
         update.dateRange &&
         update.dateRange.date_from &&
         (update.dateRange.date_from !== currentState.dateRange?.date_from ||
-            update.dateRange.date_to !== currentState.dateRange?.date_to)
+            update.dateRange.date_to !== currentState.dateRange?.date_to) &&
+        !isIntervalManuallySet // Only auto-adjust interval if not manually set
     ) {
         const { date_from, date_to } = { ...currentState.dateRange, ...update.dateRange }
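The `isIntervalManuallySet` reducer above is a sticky override latch: any query-source update that carries an `interval` key (a key-presence check, not a truthiness check) flips the flag, and from then on date-range edits skip the interval auto-adjustment in `handleQuerySourceUpdateSideEffects`. A minimal framework-free sketch of the same latch, with a simplified update shape standing in for the real kea wiring:

    // Simplified model of the latch: `interval` present in an update => manual.
    interface SourceUpdate {
        interval?: string
        dateRange?: { date_from?: string | null; date_to?: string | null }
    }

    function nextIsIntervalManuallySet(state: boolean, update: SourceUpdate): boolean {
        // 'interval' in update is deliberately a key check, not a truthiness check:
        // even `interval: undefined` counts as the user touching the control.
        return 'interval' in update ? true : state
    }

    function shouldAutoAdjustInterval(update: SourceUpdate, manuallySet: boolean): boolean {
        return Boolean(update.dateRange?.date_from) && !manuallySet
    }

    // Usage: once a manual interval arrives, later date-range updates leave it alone.
    let manual = false
    manual = nextIsIntervalManuallySet(manual, { interval: 'week' }) // true
    console.log(shouldAutoAdjustInterval({ dateRange: { date_from: '-90d' } }, manual)) // false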
diff --git a/frontend/src/scenes/max/Intro.tsx b/frontend/src/scenes/max/Intro.tsx
index c43cd86b53d2a..97f4f9fbfdc56 100644
--- a/frontend/src/scenes/max/Intro.tsx
+++ b/frontend/src/scenes/max/Intro.tsx
@@ -3,6 +3,7 @@ import { LemonButton, Popover } from '@posthog/lemon-ui'
 import { useActions, useValues } from 'kea'
 import { HedgehogBuddy } from 'lib/components/HedgehogBuddy/HedgehogBuddy'
 import { hedgehogBuddyLogic } from 'lib/components/HedgehogBuddy/hedgehogBuddyLogic'
+import { uuid } from 'lib/utils'
 import { useMemo, useState } from 'react'

 import { maxGlobalLogic } from './maxGlobalLogic'
@@ -19,13 +20,13 @@ export function Intro(): JSX.Element {
     const { hedgehogConfig } = useValues(hedgehogBuddyLogic)
     const { acceptDataProcessing } = useActions(maxGlobalLogic)
     const { dataProcessingAccepted } = useValues(maxGlobalLogic)
-    const { sessionId } = useValues(maxLogic)
+    const { conversation } = useValues(maxLogic)

     const [hedgehogDirection, setHedgehogDirection] = useState<'left' | 'right'>('right')

     const headline = useMemo(() => {
-        return HEADLINES[parseInt(sessionId.split('-').at(-1) as string, 16) % HEADLINES.length]
-    }, [])
+        return HEADLINES[parseInt((conversation?.id || uuid()).split('-').at(-1) as string, 16) % HEADLINES.length]
+    }, [conversation?.id])

     return (
         <>
diff --git a/frontend/src/scenes/max/Max.stories.tsx b/frontend/src/scenes/max/Max.stories.tsx
index bec5a519de8e0..51dc03ab0cb5c 100644
--- a/frontend/src/scenes/max/Max.stories.tsx
+++ b/frontend/src/scenes/max/Max.stories.tsx
@@ -6,7 +6,13 @@ import { projectLogic } from 'scenes/projectLogic'

 import { mswDecorator, useStorybookMocks } from '~/mocks/browser'

-import { chatResponseChunk, failureChunk, generationFailureChunk } from './__mocks__/chatResponse.mocks'
+import {
+    chatResponseChunk,
+    CONVERSATION_ID,
+    failureChunk,
+    generationFailureChunk,
+    humanMessage,
+} from './__mocks__/chatResponse.mocks'
 import { MaxInstance } from './Max'
 import { maxGlobalLogic } from './maxGlobalLogic'
 import { maxLogic } from './maxLogic'
@@ -16,7 +22,7 @@ const meta: Meta = {
     decorators: [
         mswDecorator({
             post: {
-                '/api/environments/:team_id/query/chat/': (_, res, ctx) => res(ctx.text(chatResponseChunk)),
+                '/api/environments/:team_id/conversations/': (_, res, ctx) => res(ctx.text(chatResponseChunk)),
             },
         }),
     ],
@@ -28,10 +34,7 @@
 }
 export default meta

-// The session ID is hard-coded here, as it's used for randomizing the welcome headline
-const SESSION_ID = 'b1b4b3b4-1b3b-4b3b-1b3b4b3b4b3b'
-
-const Template = ({ sessionId: SESSION_ID }: { sessionId: string }): JSX.Element => {
+const Template = ({ conversationId: CONVERSATION_ID }: { conversationId: string }): JSX.Element => {
     const { acceptDataProcessing } = useActions(maxGlobalLogic)

     useEffect(() => {
@@ -40,7 +43,7 @@

     return (
         <div className="relative flex flex-col h-fit">
-            <BindLogic logic={maxLogic} props={{ sessionId: SESSION_ID }}>
+            <BindLogic logic={maxLogic} props={{ conversationId: CONVERSATION_ID }}>
                 <MaxInstance />
             </BindLogic>
         </div>
@@ -69,7 +72,7 @@ export const Welcome: StoryFn = () => {
         acceptDataProcessing(false)
     }, [])

-    return <Template sessionId={SESSION_ID} />
+    return <Template conversationId={CONVERSATION_ID} />
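Finally, a note on the Intro.tsx change earlier: the headline stays stable per conversation by hashing the last segment of the conversation's UUID, with a fresh `uuid()` as fallback before any conversation exists. A self-contained sketch of that selection; the headline strings are illustrative stand-ins for the real HEADLINES array:

    // Deterministic headline pick: same conversation id => same headline.
    const HEADLINES = ['How can I help you build?', 'What are you curious about?'] // illustrative

    function pickHeadline(conversationId: string): string {
        // A UUID's last segment is 12 hex digits (~2^48), well within Number's
        // safe-integer range, so parseInt + modulo buckets it exactly.
        const lastSegment = conversationId.split('-').at(-1) as string
        return HEADLINES[parseInt(lastSegment, 16) % HEADLINES.length]
    }

    // Usage: the same id always maps to the same headline.
    console.log(pickHeadline('b1b4b3b4-1b3b-4b3b-8b3b-b1b4b3b4b3b4'))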